def test_handle(self, allow_build, handle_auto_rebuild, prepare_builds,
                    start_to_build_images):
        """Verify handle() forwards auto-rebuild bundles to _prepare_builds.

        The decorator-injected mocks simulate an event that passes the
        allow-policy and whose auto-rebuild path yields two bundles;
        handle() must call _prepare_builds exactly once with the DB event
        and that exact bundle list.
        """
        event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory)
        db_event = Event.get_or_create_from_event(db.session, event)
        # Policy check passes; the auto-rebuild path returns two bundles.
        allow_build.return_value = True
        handle_auto_rebuild.return_value = [{"bundle": 1}, {"bundle": 2}]
        # Two pre-created ArtifactBuild rows stand in for the prepared builds.
        prepare_builds.return_value = [
            ArtifactBuild.create(db.session,
                                 db_event,
                                 "ed0",
                                 "image",
                                 1234,
                                 original_nvr="some_name-2-12345",
                                 rebuilt_nvr="some_name-2-12346"),
            ArtifactBuild.create(db.session,
                                 db_event,
                                 "ed0",
                                 "image",
                                 12345,
                                 original_nvr="some_name_2-2-2",
                                 rebuilt_nvr="some_name_2-2-210")
        ]

        self.handler.handle(event)

        # _prepare_builds must receive the DB event and the bundle list
        # produced by the auto-rebuild step, unchanged.
        self.handler._prepare_builds.assert_called_once()
        self.assertEqual(self.handler._prepare_builds.call_args[0][0],
                         db_event)
        self.assertEqual(self.handler._prepare_builds.call_args[0][1],
                         [{
                             "bundle": 1
                         }, {
                             "bundle": 2
                         }])
 def test_get_or_create_from_event(self):
     """The same message must map to a single DB row on repeated calls."""
     event = events.TestingEvent('msg-1')
     # First pass creates the row; second pass must return the same one.
     for _ in (0, 1):
         db_event = Event.get_or_create_from_event(db.session, event)
         self.assertEqual(db_event.id, 1)
         self.assertEqual(db_event.message_id, 'msg-1')
# Example n. 3
    def handle(self, event):
        """Trigger image rebuilds for the advisory carried by *event*.

        The event is persisted to the database first; internal policy may
        then veto the rebuild, in which case the DB event is marked SKIPPED
        and an empty list is returned.
        """
        if event.dry_run:
            self.force_dry_run()

        db_event = Event.get_or_create_from_event(db.session, event)
        self.set_context(db_event)

        # Internal policy decides whether this advisory may trigger rebuilds.
        if event.is_allowed(self):
            self.rebuild_images_depending_on_advisory(db_event,
                                                      event.advisory.errata_id)
        else:
            msg = ("Errata advisory {0} is not allowed by internal policy "
                   "to trigger Bob rebuilds.".format(event.advisory.errata_id))
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            self.log_info(msg)
            return []
    def handle(self, event):
        """Handle a bundle-rebuild event, manual or automatic.

        Persists the event, applies the internal allow-policy, collects the
        bundles to rebuild, records the planned builds and starts them.
        Always returns an empty list of follow-up events.
        """
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)
        self.set_context(db_event)

        # Internal policy can veto the whole rebuild.
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Manual rebuilds carry an explicit bundle list; otherwise the
        # bundles to rebuild are discovered automatically.
        collect = (self._handle_manual_rebuild
                   if isinstance(event, ManualBundleRebuild)
                   else self._handle_auto_rebuild)
        bundles_to_rebuild = collect(db_event)

        if not bundles_to_rebuild:
            return []

        builds = self._prepare_builds(db_event, bundles_to_rebuild)

        # _prepare_builds may have switched the logging context; restore it.
        self.set_context(db_event)

        self.start_to_build_images(builds)
        msg = f"Advisory {db_event.search_key}: Rebuilding " \
              f"{len(db_event.builds.all())} bundle images."
        db_event.transition(EventState.BUILDING, msg)

        return []
    def test_get_pullspecs_mapping(self):
        """_get_pullspecs_mapping maps recorded "_old" pullspecs to the new
        ones and warns about bundle images that have no build on record."""
        manual_event = ManualBundleRebuild(
            "test_msg_id",
            container_images=[],
            bundle_images=["bundle_image_1", "bundle_image_2"])
        shipped_event = BotasErrataShippedEvent("test_msg_id",
                                                self.botas_advisory)
        db_event = Event.get_or_create_from_event(db.session, shipped_event)
        build = ArtifactBuild.create(db.session, db_event, "ed0", "image",
                                     1234, rebuilt_nvr="bundle_image_1")
        # Only the second replacement carries "_old", so only it should
        # contribute an entry to the resulting mapping.
        plain_replacement = {
            "new": "some_pullspec",
            "original": "original_pullspec",
            "pinned": True,
        }
        overridden_replacement = {
            "new": "new_pullspec",
            "original": "original_pullspec",
            "pinned": True,
            "_old": "old_pullspec",
        }
        build.bundle_pullspec_overrides = {
            "pullspec_replacements": [plain_replacement,
                                      overridden_replacement],
        }
        self.handler.event = manual_event
        db.session.commit()

        # "bundle_image_2" has no build on record → must log a warning.
        with self.assertLogs("freshmaker", "WARNING") as log:
            pullspec_map = self.handler._get_pullspecs_mapping()

        self.assertTrue(
            "Can't find build for a bundle image \"bundle_image_2\"" in
            log.output[0])
        self.assertEqual(pullspec_map, {"old_pullspec": "new_pullspec"})
# Example n. 6
    def handle(self, event):
        """
        Rebuilds all container images which contain packages from the Errata
        advisory.

        :param event: the RPMs-signed advisory event being handled; its DB
            representation is created (or fetched) and transitions through
            SKIPPED / COMPLETE / BUILDING depending on the outcome.
        :return: an empty list — no follow-up events are generated.
        """

        if event.dry_run:
            self.force_dry_run()

        self.event = event

        # Generate the Database representation of `event`, it can be
        # triggered by user, we want to track what happened

        db_event = Event.get_or_create_from_event(db.session, event)

        db.session.commit()
        self.set_context(db_event)

        # Check if we are allowed to build this advisory.
        if not self.event.is_allowed(self):
            msg = ("Errata advisory {0} is not allowed by internal policy "
                   "to trigger rebuilds.".format(event.advisory.errata_id))
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            self.log_info(msg)
            return []

        # Get and record all images to rebuild based on the current
        # ErrataAdvisoryRPMsSignedEvent event.
        batches = self._find_images_to_rebuild(db_event.search_key)
        builds = self._record_batches(batches, event)

        # Nothing to rebuild at all → mark the event as skipped.
        if not builds:
            msg = 'No container images to rebuild for advisory %r' % event.advisory.name
            self.log_info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            return []

        # All recorded builds already failed during recording (e.g. broken
        # dependencies) → the event is complete with nothing buildable.
        if all([
                build.state == ArtifactBuildState.FAILED.value
                for build in builds.values()
        ]):
            db_event.transition(
                EventState.COMPLETE,
                "No container images to rebuild, all are in failed state.")
            db.session.commit()
            return []

        if event.advisory.state != 'SHIPPED_LIVE':
            # If freshmaker is configured to rebuild images only when advisory
            # moves to SHIPPED_LIVE state, there is no need to generate new
            # composes for rebuild as all signed RPMs should already be
            # available from official YUM repositories.
            #
            # Generate the ODCS compose with RPMs from the current advisory.
            repo_urls = self.odcs.prepare_yum_repos_for_rebuilds(db_event)
            self.log_info(
                "Following repositories will be used for the rebuild:")
            for url in repo_urls:
                self.log_info("   - %s", url)

        # Log what we are going to rebuild
        self._check_images_to_rebuild(db_event, builds)
        self.start_to_build_images(
            db_event.get_image_builds_in_first_batch(db.session))

        msg = 'Advisory %s: Rebuilding %d container images.' % (
            db_event.search_key, len(db_event.builds.all()))
        db_event.transition(EventState.BUILDING, msg)

        return []
# Example n. 7
    def _record_batches(self, batches, event, builds=None):
        """
        Records the images from batches to database.

        :param batches list: Output of LightBlue._find_images_to_rebuild(...).
        :param event ErrataAdvisoryRPMsSignedEvent: The event this handler
            is currently handling.
        :param builds dict: mappings from docker image build NVR to
            corresponding ArtifactBuild object, e.g.
            ``{brew_build_nvr: ArtifactBuild, ...}``. Previous builds returned
            from this method can be passed to this call to be extended by
            adding a new mappings after docker image is stored into database.
            For the first time to call this method, builds could be None.
        :return: a mapping between docker image build NVR and
            corresponding ArtifactBuild object representing a future rebuild of
            that docker image. It is extended by including those docker images
            stored into database.
        :rtype: dict
        """
        db_event = Event.get_or_create_from_event(db.session, event)

        # Used as tmp dict with {brew_build_nvr: ArtifactBuild, ...} mapping.
        builds = builds or {}

        # Cache for ODCS pulp composes. Key is white-spaced, sorted, list
        # of content_sets. Value is Compose database object.
        odcs_cache = {}

        for batch in batches:
            for image in batch:
                # Reset context to db_event for each iteration before
                # the ArtifactBuild is created.
                self.set_context(db_event)

                nvr = image.nvr
                # Skip images already recorded in an earlier call/batch.
                if nvr in builds:
                    self.log_debug(
                        "Skipping recording build %s, "
                        "it is already in db", nvr)
                    continue

                # Skip images already built by an event this one depends on.
                parent_build = db_event.get_artifact_build_from_event_dependencies(
                    nvr)
                if parent_build:
                    self.log_debug(
                        "Skipping recording build %s, "
                        "it is already built in dependant event %r", nvr,
                        parent_build[0].event_id)
                    continue

                self.log_debug("Recording %s", nvr)
                parent_nvr = image["parent"].nvr \
                    if "parent" in image and image["parent"] else None
                dep_on = builds[parent_nvr] if parent_nvr in builds else None

                # If the parent was already rebuilt by a dependency event,
                # use its rebuilt NVR and drop the intra-event dependency.
                if parent_nvr:
                    build = db_event.get_artifact_build_from_event_dependencies(
                        parent_nvr)
                    if build:
                        parent_nvr = build[0].rebuilt_nvr
                        dep_on = None

                if "error" in image and image["error"]:
                    state_reason = image["error"]
                    state = ArtifactBuildState.FAILED.value
                elif dep_on and dep_on.state == ArtifactBuildState.FAILED.value:
                    # If this artifact build depends on a build which cannot
                    # be built by Freshmaker, mark this one as failed too.
                    state_reason = "Cannot build artifact, because its " \
                        "dependency cannot be built."
                    state = ArtifactBuildState.FAILED.value
                else:
                    state_reason = ""
                    state = ArtifactBuildState.PLANNED.value

                image_name = koji.parse_NVR(image.nvr)["name"]

                # Only released images are considered as directly affected for
                # rebuild. If some image is not in the latest released version and
                # it is included in a rebuild, it must be just a dependency of
                # other image.
                if image.get('directly_affected'):
                    rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value
                else:
                    rebuild_reason = RebuildReason.DEPENDENCY.value

                # The build is recorded as PLANNED here; the state computed
                # above (possibly FAILED) is applied via transition() below.
                build = self.record_build(
                    event,
                    image_name,
                    ArtifactType.IMAGE,
                    dep_on=dep_on,
                    state=ArtifactBuildState.PLANNED.value,
                    original_nvr=nvr,
                    rebuild_reason=rebuild_reason)

                # Set context to particular build so logging shows this build
                # in case of error.
                self.set_context(build)

                build.transition(state, state_reason)

                build.build_args = json.dumps({
                    "repository":
                    image["repository"],
                    "commit":
                    image["commit"],
                    "original_parent":
                    parent_nvr,
                    "target":
                    image["target"],
                    "branch":
                    image["git_branch"],
                    "arches":
                    image["arches"],
                    "renewed_odcs_compose_ids":
                    image["odcs_compose_ids"],
                })

                db.session.commit()

                if state != ArtifactBuildState.FAILED.value:
                    # Store odcs pulp compose to build.
                    # Also generate pulp repos in case the image is unpublished,
                    # because in this case, we have to generate extra ODCS compose
                    # with all the RPMs in the image anyway later. And OSBS works
                    # in a way that we have to pass all the ODCS composes to it or
                    # no ODCS compose at all.
                    if image["generate_pulp_repos"] or not image["published"]:
                        # Check if the compose for these content_sets is
                        # already cached and use it in this case.
                        cache_key = " ".join(sorted(image["content_sets"]))
                        if cache_key in odcs_cache:
                            db_compose = odcs_cache[cache_key]
                        else:
                            compose = self.odcs.prepare_pulp_repo(
                                build, image["content_sets"])

                            # prepare_pulp_repo may have failed the build;
                            # only cache/attach the compose on success.
                            if build.state != ArtifactBuildState.FAILED.value:
                                db_compose = Compose(
                                    odcs_compose_id=compose['id'])
                                db.session.add(db_compose)
                                db.session.commit()
                                odcs_cache[cache_key] = db_compose
                            else:
                                db_compose = None
                                db.session.commit()
                        if db_compose:
                            build.add_composes(db.session, [db_compose])
                            db.session.commit()

                    # Unpublished images can contain unreleased RPMs, so generate
                    # the ODCS compose with all the RPMs in the image to allow
                    # installation of possibly unreleased RPMs.
                    if not image["published"]:
                        compose = self.odcs.prepare_odcs_compose_with_image_rpms(
                            image)
                        if compose:
                            db_compose = Compose(odcs_compose_id=compose['id'])
                            db.session.add(db_compose)
                            db.session.commit()
                            build.add_composes(db.session, [db_compose])
                            db.session.commit()

                builds[nvr] = build

        # Reset context to db_event.
        self.set_context(db_event)

        return builds
    def handle(self, event):
        """Rebuild operator bundle images affected by a shipped advisory.

        Maps original image NVRs to their rebuilt NVRs, resolves the
        manifest-list digests of both sides in Pyxis, finds the latest
        bundles referencing the original digests, overrides their pinned
        pullspecs with the rebuilt digests, and starts the rebuilds.

        :param event: the BOTAS advisory event being handled.
        :return: an empty list — no follow-up events are generated.

        Fix: corrected the "Orignial" typo in the operator-facing log
        message below ("Original nvrs of build in the advisory ...").
        """
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = self._create_original_to_rebuilt_nvrs_map()

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Original nvrs of build in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images, manifest_list_digest is used
        # in pullspecs in bundle's related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image")

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images, manifest_list_digest is used
        # in pullspecs of bundle's related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image")

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        # A mapping of digests to bundle metadata. This metadata is used to
        # for the CSV metadata updates.
        bundle_mds_by_digest = {}

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles)
            if not bundles:
                log.info(
                    f"No latest bundle image with the related image of {image_nvr}"
                )
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_mds_by_digest[bundle_digest] = bundle
                bundle_digests_by_related_nvr.setdefault(
                    image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspecs": [...],
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            # CSV modifications for the rebuilt bundle image
            'pullspecs': [],
            'append': {},
            'update': {},
        }

        # Get images for each bundle digest, a bundle digest can have multiple images
        # with different arches.
        for digest in bundle_mds_by_digest:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                continue

            # deepcopy so each digest gets its own mutable copy of the
            # default data (the nested lists/dicts must not be shared).
            bundles_by_digest.setdefault(digest,
                                         copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # auto_rebuild_tags of repository and bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches, just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (buildinfo.get('extra', {}).get('image', {}).get(
                'operator_manifests', {}).get('related_images', {}))
            bundle_data['osbs_pinning'] = related_images.get(
                'created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspecs'] = related_images.get('pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild']
                    and bundle_data['osbs_pinning']):
                continue

            csv_name = bundle_mds_by_digest[digest]['csv_name']
            version = bundle_mds_by_digest[digest]['version']
            bundle_data.update(self._get_csv_updates(csv_name, version))

            for pullspec in bundle_data['pullspecs']:
                # A pullspec item example:
                # {
                #   'new': 'registry.exampe.io/repo/example-operator@sha256:<sha256-value>'
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True
                # }

                # A pullspec path is in format of "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec
                # Always set pinned to True when it was replaced by Freshmaker
                # since it indicates that the pullspec was modified from the
                # original pullspec
                pullspec['pinned'] = True

                # Once a pullspec in this bundle has been overrided, add this bundle
                # to rebuild list
                to_rebuild_digests.add(digest)

        if not to_rebuild_digests:
            msg = f"No bundle images to rebuild for advisory {event.advisory.name}"
            self.log_info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            return []

        builds = self._prepare_builds(db_event, bundles_by_digest,
                                      to_rebuild_digests)

        # Reset context to db_event.
        self.set_context(db_event)

        self.start_to_build_images(builds)
        msg = f"Advisory {db_event.search_key}: Rebuilding " \
              f"{len(db_event.builds.all())} bundle images."
        db_event.transition(EventState.BUILDING, msg)

        return []
    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Get builds NVRs from the advisory attached to the message/event and
        # then get original NVR for every build

        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = {}
        for product_info in event.advisory.builds.values():
            for build in product_info['builds']:
                # Search for the first build that triggered the chain of rebuilds
                # for every shipped NVR to get original NVR from it
                original_nvr = self.get_published_original_nvr(build['nvr'])
                if original_nvr is None:
                    continue
                nvrs_mapping[original_nvr] = build['nvr']

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Orignial nvrs of build in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images, manifest_list_digest is used
        # in pullspecs in bundle's related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image"
                )

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images, manifest_list_digest is used
        # in pullspecs of bundle's related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image"
                )

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        # A set of unique bundle digests
        bundle_digests = set()

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles
            )
            if not bundles:
                log.info(f"No latest bundle image with the related image of {image_nvr}")
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_digests.add(bundle_digest)
                bundle_digests_by_related_nvr.setdefault(image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspecs": [...],
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            'pullspecs': [],
        }

        # Get images for each bundle digest, a bundle digest can have multiple images
        # with different arches.
        for digest in bundle_digests:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                continue

            bundles_by_digest.setdefault(digest, copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # auto_rebuild_tags of repository and bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches, just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (
                buildinfo.get("extra", {})
                .get("image", {})
                .get("operator_manifests", {})
                .get("related_images", {})
            )
            bundle_data['osbs_pinning'] = related_images.get('created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspecs'] = related_images.get('pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild'] and bundle_data['osbs_pinning']):
                continue

            for pullspec in bundle_data['pullspecs']:
                # A pullspec item example:
                # {
                #   'new': 'registry.exampe.io/repo/example-operator@sha256:<sha256-value>'
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True
                # }

                # If related image is not pinned by OSBS, skip
                if not pullspec.get('pinned', False):
                    continue

                # A pullspec path is in format of "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec

                # Once a pullspec in this bundle has been overrided, add this bundle
                # to rebuild list
                to_rebuild_digests.add(digest)

        # Skip that event because we can't proceed with processing it.
        # TODO
        # Now when we have bundle images' nvrs we can procceed with rebuilding it
        msg = f"Skipping the rebuild of {len(to_rebuild_digests)} bundle images " \
              "due to being blocked on further implementation for now."
        db_event.transition(EventState.SKIPPED, msg)
        return []
    def test_handle_manual_rebuild(self, get_pullspecs_mapping, get_build,
                                   get_csv_updates):
        """Verify _handle_manual_rebuild builds rebuild data for each image.

        Two requested container images exercise both data sources visible in
        the expected output:

        * ``container_image_1_nvr`` has an existing ArtifactBuild whose stored
          ``bundle_pullspec_overrides`` are reused, with pullspecs remapped
          through the mocked pullspecs mapping (``old_pullspec`` ->
          ``new_pullspec``);
        * ``container_image_2_nvr`` has no ArtifactBuild, so its pullspecs
          come from the mocked Koji build metadata, again remapped
          (``old_pullspec_2`` -> ``new_pullspec_2``).
        """
        # Mapping of old pullspecs to their replacements, as returned by the
        # patched _get_pullspecs_mapping helper.
        get_pullspecs_mapping.return_value = {
            "old_pullspec": "new_pullspec",
            "old_pullspec_2": "new_pullspec_2"
        }
        # Koji build metadata for the image that has no ArtifactBuild record.
        build_by_nvr = {
            "container_image_2_nvr": {
                "extra": {
                    "image": {
                        "operator_manifests": {
                            "related_images": {
                                "pullspecs": [{
                                    "new": "newer_pullspes",
                                    "original": "original_pullspec",
                                    "pinned": True,
                                }, {
                                    "new": "old_pullspec_2",
                                    "original": "original_pullspec_2",
                                    "pinned": True,
                                }]
                            }
                        }
                    }
                }
            }
        }
        # Pullspec overrides stored on the ArtifactBuild of image 1.
        bundle_pullspec_overrides = {
            "pullspec_replacements": [{
                "new": "old_pullspec",
                "original": "original_pullspec_3",
                "pinned": True
            }]
        }
        # Pyxis lookups: NVR -> manifest list digest -> bundle metadata.
        digest_by_nvr = {
            "container_image_1_nvr": "container_image_1_digest",
            "container_image_2_nvr": "container_image_2_digest",
        }
        bundle_by_digest = {
            "container_image_1_digest": [{
                "bundle_path_digest": "bundle_1",
                "csv_name": "image.1.2.5",
                "version": "1.2.5",
            }],
            "container_image_2_digest": [{
                "bundle_path_digest": "bundle_2",
                "csv_name": "image.1.2.5",
                "version": "1.2.5",
            }],
        }

        event = ManualBundleRebuild("test_msg_id",
                                    container_images=[
                                        "container_image_1_nvr",
                                        "container_image_2_nvr"
                                    ],
                                    bundle_images=[])
        db_event = Event.get_or_create_from_event(db.session, event)
        self.handler.event = event
        get_build.side_effect = lambda nvr: build_by_nvr[nvr]
        # ArtifactBuild exists only for container_image_1_nvr (rebuilt_nvr).
        build = ArtifactBuild.create(db.session,
                                     db_event,
                                     "ed0",
                                     "image",
                                     1234,
                                     rebuilt_nvr="container_image_1_nvr")
        build.bundle_pullspec_overrides = bundle_pullspec_overrides

        # Signature mirrors Pyxis.get_manifest_list_digest_by_nvr, which is
        # also called with must_be_published as a keyword argument.
        def gmldbn(nvr, must_be_published=True):
            return digest_by_nvr[nvr]

        self.pyxis().get_manifest_list_digest_by_nvr.side_effect = gmldbn
        self.pyxis().get_bundles_by_digest.side_effect = \
            lambda digest: bundle_by_digest[digest]
        get_csv_updates.return_value = {"update": "csv_update_placeholder"}
        db.session.commit()

        bundles_to_rebuild = self.handler._handle_manual_rebuild(db_event)

        expected_bundles = [{
            "nvr":
            "container_image_1_nvr",
            "update":
            "csv_update_placeholder",
            "pullspec_replacements": [{
                "new": "new_pullspec",
                "original": "original_pullspec_3",
                "pinned": True
            }]
        }, {
            "nvr":
            "container_image_2_nvr",
            "update":
            "csv_update_placeholder",
            "pullspec_replacements": [{
                "new": "newer_pullspes",
                "original": "original_pullspec",
                "pinned": True,
            }, {
                "new": "new_pullspec_2",
                "original": "original_pullspec_2",
                "pinned": True,
            }],
        }]
        self.assertCountEqual(bundles_to_rebuild, expected_bundles)
        self.pyxis().get_manifest_list_digest_by_nvr.assert_has_calls(
            [call("container_image_1_nvr"),
             call("container_image_2_nvr")],
            any_order=True)
        self.assertEqual(self.pyxis().get_bundles_by_digest.call_count, 2)
        self.pyxis().get_bundles_by_digest.assert_has_calls([
            call("container_image_1_digest"),
            call("container_image_2_digest")
        ],
                                                            any_order=True)
    def test_handle_auto_rebuild(self):
        """Verify _handle_auto_rebuild produces rebuild data for bundles.

        The handler is fed an original->rebuilt NVR map and mocked Pyxis/Koji
        lookups; it is expected to return, for each affected bundle, the
        pullspec replacements (original digest swapped for the rebuilt one,
        old value kept under ``_old``) plus CSV ``update`` metadata whose
        version/name suffix is derived from the frozen time (2020-12-25,
        epoch 1608854400).
        """
        # NVR -> manifest list digest, for both original and rebuilt images.
        nvr_to_digest = {
            "original_1": "original_1_digest",
            "some_name-1-12345": "some_name-1-12345_digest",
            "original_2": "original_2_digest",
            "some_name_2-2-2": "some_name_2-2-2_digest",
        }
        bundles = [{
            "bundle_path_digest": "original_1_digest"
        }, {
            "bundle_path_digest": "some_name-1-12345_digest"
        }, {
            "bundle_path_digest": "original_2_digest"
        }, {
            "bundle_path_digest": "some_name_2-2-2_digest"
        }]
        # Bundles returned for the digests of the original images.
        bundles_with_related_images = {
            "original_1_digest": [
                {
                    "bundle_path_digest":
                    "bundle_with_related_images_1_digest",
                    "csv_name": "image.1.2.3",
                    "version": "1.2.3",
                },
            ],
            "original_2_digest": [
                {
                    "bundle_path_digest":
                    "bundle_with_related_images_2_digest",
                    "csv_name": "image.1.2.4",
                    "version": "1.2.4",
                },
            ]
        }
        # Bundle digest -> bundle image metadata (carries the brew build NVR).
        image_by_digest = {
            "bundle_with_related_images_1_digest": {
                "brew": {
                    "build": "bundle1_nvr-1-1"
                }
            },
            "bundle_with_related_images_2_digest": {
                "brew": {
                    "build": "bundle2_nvr-1-1"
                }
            },
        }
        # Koji buildinfo with OSBS-pinned related-image pullspecs.
        builds = {
            "bundle1_nvr-1-1": {
                "task_id": 1,
                "extra": {
                    "image": {
                        "operator_manifests": {
                            "related_images": {
                                "created_by_osbs":
                                True,
                                "pullspecs": [{
                                    "new":
                                    "registry/repo/operator1@original_1_digest",
                                    "original":
                                    "registry/repo/operator1:v2.2.0",
                                    "pinned": True,
                                }]
                            },
                        }
                    }
                }
            },
            "bundle2_nvr-1-1": {
                "task_id": 2,
                "extra": {
                    "image": {
                        "operator_manifests": {
                            "related_images": {
                                "created_by_osbs":
                                True,
                                "pullspecs": [{
                                    "new":
                                    "registry/repo/operator2@original_2_digest",
                                    "original":
                                    "registry/repo/operator2:v2.2.0",
                                    "pinned": True,
                                }]
                            },
                        }
                    }
                }
            }
        }

        event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory)
        db_event = Event.get_or_create_from_event(db.session, event)
        self.handler.event = event
        self.handler._create_original_to_rebuilt_nvrs_map = \
            MagicMock(return_value={"original_1": "some_name-1-12345",
                                    "original_2": "some_name_2-2-2"})

        # Signature mirrors Pyxis.get_manifest_list_digest_by_nvr, which is
        # also called with must_be_published as a keyword argument.
        def gmldbn(nvr, must_be_published=True):
            return nvr_to_digest[nvr]

        self.pyxis().get_manifest_list_digest_by_nvr.side_effect = gmldbn
        self.pyxis().get_operator_indices.return_value = []
        self.pyxis().get_latest_bundles.return_value = bundles
        # return bundles for original images
        self.pyxis(
        ).get_bundles_by_related_image_digest.side_effect = lambda x, y: bundles_with_related_images[
            x]
        self.pyxis(
        ).get_images_by_digest.side_effect = lambda x: [image_by_digest[x]]
        self.handler.image_has_auto_rebuild_tag = MagicMock(return_value=True)
        get_build = self.patcher.patch(
            "freshmaker.kojiservice.KojiService.get_build")
        get_build.side_effect = lambda x: builds[x]

        # Freeze time so the generated CSV version/name suffix is stable.
        now = datetime(year=2020, month=12, day=25, hour=0, minute=0, second=0)
        with freezegun.freeze_time(now):
            bundles_to_rebuild = self.handler._handle_auto_rebuild(db_event)

        self.assertNotEqual(db_event.state, EventState.SKIPPED.value)
        get_build.assert_has_calls(
            [call("bundle1_nvr-1-1"),
             call("bundle2_nvr-1-1")], any_order=True)
        # Expected per-bundle rebuild data: pullspecs point at the rebuilt
        # digests, "_old" keeps the original pullspec, and "update" carries
        # the patched CSV name/version with olm.substitutesFor annotation.
        bundles_by_digest = {
            "bundle_with_related_images_1_digest": {
                "auto_rebuild":
                True,
                "images": [{
                    "brew": {
                        "build": "bundle1_nvr-1-1"
                    }
                }],
                "nvr":
                "bundle1_nvr-1-1",
                "osbs_pinning":
                True,
                "pullspec_replacements": [{
                    "new":
                    "registry/repo/operator1@some_name-1-12345_digest",
                    "original":
                    "registry/repo/operator1:v2.2.0",
                    "pinned":
                    True,
                    "_old":
                    "registry/repo/operator1@original_1_digest"
                }],
                "update": {
                    "metadata": {
                        "name": "image.1.2.3-0.1608854400.p",
                        "annotations": {
                            "olm.substitutesFor": "1.2.3"
                        },
                    },
                    "spec": {
                        "version": "1.2.3+0.1608854400.p"
                    },
                },
            },
            "bundle_with_related_images_2_digest": {
                "auto_rebuild":
                True,
                "images": [{
                    "brew": {
                        "build": "bundle2_nvr-1-1"
                    }
                }],
                "nvr":
                "bundle2_nvr-1-1",
                "osbs_pinning":
                True,
                "pullspec_replacements": [{
                    "new":
                    "registry/repo/operator2@some_name_2-2-2_digest",
                    "original":
                    "registry/repo/operator2:v2.2.0",
                    "pinned":
                    True,
                    "_old":
                    "registry/repo/operator2@original_2_digest"
                }],
                "update": {
                    "metadata": {
                        "name": "image.1.2.4-0.1608854400.p",
                        "annotations": {
                            "olm.substitutesFor": "1.2.4"
                        },
                    },
                    "spec": {
                        "version": "1.2.4+0.1608854400.p"
                    },
                },
            },
        }
        self.assertCountEqual(bundles_to_rebuild,
                              list(bundles_by_digest.values()))
    def handle(self, event):
        """Handle a BOTAS-shipped-advisory event.

        Resolves the original NVR for every build attached to the advisory,
        looks up their digests and the operator bundles that reference them,
        and narrows those bundles down to the ones eligible for rebuild
        (auto-rebuild tagged and OSBS-pinned).  The actual rebuild is not
        implemented yet, so the event always ends in the SKIPPED state.

        :param event: the BOTAS advisory shipped event being handled.
        :return: an empty list; no builds are scheduled yet.
        """
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Get builds NVRs from the advisory attached to the message/event and
        # then get original NVR for every build
        original_nvrs = set()
        for product_info in event.advisory.builds.values():
            for build in product_info['builds']:
                # Search for the first build that triggered the chain of rebuilds
                # for every shipped NVR to get original NVR from it
                original_nvr = self.get_published_original_nvr(build['nvr'])
                if original_nvr is None:
                    continue
                original_nvrs.add(original_nvr)

        # Fixed typo in the log message ("Orignial nvrs of build").
        self.log_info(
            "Original nvrs of builds in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))
        # Get images by nvrs and then get their digests
        original_images_digests = self._pyxis.get_digests_by_nvrs(
            original_nvrs)
        if not original_images_digests:
            msg = f"There are no digests for NVRs: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        bundles = self._pyxis.filter_bundles_by_related_image_digests(
            original_images_digests, all_bundles)
        bundle_digests = set()
        for bundle in bundles:
            if not bundle.get('bundle_path_digest'):
                log.warning("Bundle %s doesn't have 'bundle_path_digests' set",
                            bundle['bundle_path'])
                continue
            bundle_digests.add(bundle['bundle_path_digest'])
        bundle_images = self._pyxis.get_images_by_digests(bundle_digests)

        # Filter image nvrs that don't have or never had auto_rebuild tag
        # in repos, where image is published
        auto_rebuild_nvrs = self._pyxis.get_auto_rebuild_tagged_images(
            bundle_images)

        # get NVRs only of those bundles, which have OSBS pinning
        bundles_nvrs = self._filter_bundles_by_pinned_related_images(
            auto_rebuild_nvrs)

        # Skip that event because we can't proceed with processing it.
        # TODO
        # Now when we have bundle images' nvrs we can proceed with rebuilding it
        msg = f"Skipping the rebuild of {len(bundles_nvrs)} bundle images " \
              "due to being blocked on further implementation for now."
        db_event.transition(EventState.SKIPPED, msg)
        return []
# Example (Esempio) n. 13, score: 0
    def handle(self, event):
        """
        Rebuilds all container images requested by the user and the tree in between the
        requested images, if relationships between them are found.

        :param event: the manual rebuild event to process.
        :return: an empty list; scheduled builds are tracked via the db event.
        """

        if event.dry_run:
            self.force_dry_run()

        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)
        self.set_context(db_event)

        # Check if we are allowed to build this image.
        if not self.event.is_allowed(self, ArtifactType.IMAGE):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            self.log_info(msg)
            return []

        lb = self.init_lightblue_instance()
        # images contains at this point a list of images with all NVR for the same package
        images = self._find_images_to_rebuild(lb)

        # Since the input is an image name, and not an NVR, freshmaker won't be able to know
        # exactly which one needs to be rebuilt. For this reason Freshmaker asked Lightblue
        # all the NVRs that match that image name. Now we need to check which one has the
        # dist_git_branch. If more than one is found Freshmaker will choose the one with the
        # highest NVR.
        images = self.filter_images_based_on_dist_git_branch(images, db_event)
        if not images:
            return []

        # If the user requested to rebuild only one image, there's no need to find out all the tree
        # it is just more efficient to return that single image
        if len(images) == 1:
            batches = [images]
        else:
            images_trees = self.find_images_trees_to_rebuild(images, lb)
            to_rebuild = self.filter_out_unrelated_images(images_trees)
            batches = self.generate_batches(to_rebuild, images, lb)

        builds = self._record_batches(batches, db_event, lb)

        if not builds:
            msg = f"No container images to rebuild for event with message_id {event.msg_id}"
            self.log_info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            return []

        # Generator instead of a materialized list: all() short-circuits.
        if all(build.state == ArtifactBuildState.FAILED.value
               for build in builds.values()):
            db_event.transition(
                EventState.COMPLETE,
                "No container images to rebuild, all are in failed state.")
            db.session.commit()
            return []

        self.start_to_build_images(
            db_event.get_image_builds_in_first_batch(db.session))

        msg = 'Rebuilding %d container images.' % (len(db_event.builds.all()))
        db_event.transition(EventState.BUILDING, msg)
        # NOTE(review): unlike the SKIPPED/COMPLETE paths above, this final
        # transition is not committed here — presumably a caller commits;
        # confirm before adding db.session.commit().

        return []
# Example (Esempio) n. 14, score: 0
    def test_handle(self):
        """Verify handle() drives the auto-rebuild flow to the BUILDING state.

        All external collaborators (Pyxis, Koji get_build, policy check,
        NVR mapping, _prepare_builds, start_to_build_images) are mocked; the
        test asserts the event reaches BUILDING and that _prepare_builds is
        called with the per-digest bundle data computed under frozen time
        (2020-12-25, epoch 1608854400).
        """
        event = BotasErrataShippedEvent("test_msg_id", self.botas_advisory)
        self.handler.allow_build = MagicMock(return_value=True)
        self.handler._create_original_to_rebuilt_nvrs_map = \
            MagicMock(return_value={"original_1": "some_name-1-12345",
                                    "original_2": "some_name_2-2-2"})
        # NVR -> manifest list digest, for both original and rebuilt images.
        nvr_to_digest = {
            "original_1": "original_1_digest",
            "some_name-1-12345": "some_name-1-12345_digest",
            "original_2": "original_2_digest",
            "some_name_2-2-2": "some_name_2-2-2_digest",
        }
        bundles = [{
            "bundle_path_digest": "original_1_digest"
        }, {
            "bundle_path_digest": "some_name-1-12345_digest"
        }, {
            "bundle_path_digest": "original_2_digest"
        }, {
            "bundle_path_digest": "some_name_2-2-2_digest"
        }]
        # Bundles returned for the digests of the original images.
        bundles_with_related_images = {
            "original_1_digest": [
                {
                    "bundle_path_digest":
                    "bundle_with_related_images_1_digest",
                    "csv_name": "image.1.2.3",
                    "version": "1.2.3",
                },
            ],
            "original_2_digest": [
                {
                    "bundle_path_digest":
                    "bundle_with_related_images_2_digest",
                    "csv_name": "image.1.2.4",
                    "version": "1.2.4",
                },
            ]
        }

        # Bundle digest -> bundle image metadata (carries the brew build NVR).
        image_by_digest = {
            "bundle_with_related_images_1_digest": {
                "brew": {
                    "build": "bundle1_nvr-1-1"
                }
            },
            "bundle_with_related_images_2_digest": {
                "brew": {
                    "build": "bundle2_nvr-1-1"
                }
            },
        }
        self.pyxis(
        ).get_manifest_list_digest_by_nvr.side_effect = lambda x: nvr_to_digest[
            x]
        self.pyxis().get_operator_indices.return_value = []
        self.pyxis().get_latest_bundles.return_value = bundles
        # return bundles for original images
        self.pyxis(
        ).get_bundles_by_related_image_digest.side_effect = lambda x, y: bundles_with_related_images[
            x]
        self.pyxis(
        ).get_images_by_digest.side_effect = lambda x: [image_by_digest[x]]
        self.handler.image_has_auto_rebuild_tag = MagicMock(return_value=True)
        get_build = self.patcher.patch(
            "freshmaker.kojiservice.KojiService.get_build")
        # Koji buildinfo with OSBS-pinned related-image pullspecs.
        builds = {
            "bundle1_nvr-1-1": {
                "task_id": 1,
                "extra": {
                    "image": {
                        "operator_manifests": {
                            "related_images": {
                                "created_by_osbs":
                                True,
                                "pullspecs": [{
                                    "new":
                                    "registry/repo/operator1@original_1_digest",
                                    "original":
                                    "registry/repo/operator1:v2.2.0",
                                    "pinned": True,
                                }]
                            },
                        }
                    }
                }
            },
            "bundle2_nvr-1-1": {
                "task_id": 2,
                "extra": {
                    "image": {
                        "operator_manifests": {
                            "related_images": {
                                "created_by_osbs":
                                True,
                                "pullspecs": [{
                                    "new":
                                    "registry/repo/operator2@original_2_digest",
                                    "original":
                                    "registry/repo/operator2:v2.2.0",
                                    "pinned": True,
                                }]
                            },
                        }
                    }
                }
            }
        }
        get_build.side_effect = lambda x: builds[x]
        db_event = Event.get_or_create_from_event(db.session, event)
        # _prepare_builds is mocked to return pre-created ArtifactBuilds so
        # handle() can proceed to start_to_build_images (also mocked).
        self.handler._prepare_builds = MagicMock(return_value=[
            ArtifactBuild.create(db.session,
                                 db_event,
                                 "ed0",
                                 "image",
                                 1234,
                                 original_nvr="some_name-2-12345",
                                 rebuilt_nvr="some_name-2-12346"),
            ArtifactBuild.create(db.session,
                                 db_event,
                                 "ed0",
                                 "image",
                                 12345,
                                 original_nvr="some_name_2-2-2",
                                 rebuilt_nvr="some_name_2-2-210")
        ])
        self.handler.start_to_build_images = MagicMock()
        db.session.commit()

        # Freeze time so the generated CSV version/name suffix is stable.
        now = datetime(year=2020, month=12, day=25, hour=0, minute=0, second=0)
        with freezegun.freeze_time(now):
            self.handler.handle(event)

        self.assertEqual(db_event.state, EventState.BUILDING.value)
        get_build.assert_has_calls(
            [call("bundle1_nvr-1-1"),
             call("bundle2_nvr-1-1")], any_order=True)
        # Expected per-digest bundle data passed to _prepare_builds: pullspecs
        # point at the rebuilt digests and "update"/"append" carry the patched
        # CSV metadata derived from the frozen timestamp.
        bundles_by_digest = {
            "bundle_with_related_images_1_digest": {
                "append": {
                    "spec": {
                        "skips": ["1.2.3"]
                    }
                },
                "auto_rebuild":
                True,
                "images": [{
                    "brew": {
                        "build": "bundle1_nvr-1-1"
                    }
                }],
                "nvr":
                "bundle1_nvr-1-1",
                "osbs_pinning":
                True,
                "pullspecs": [{
                    "new": "registry/repo/operator1@some_name-1-12345_digest",
                    "original": "registry/repo/operator1:v2.2.0",
                    "pinned": True,
                }],
                "update": {
                    "metadata": {
                        "name": "image.1.2.3+0.1608854400.patched",
                        "substitutes-for": "1.2.3",
                    },
                    "spec": {
                        "version": "1.2.3+0.1608854400.patched"
                    },
                },
            },
            "bundle_with_related_images_2_digest": {
                "append": {
                    "spec": {
                        "skips": ["1.2.4"]
                    }
                },
                "auto_rebuild":
                True,
                "images": [{
                    "brew": {
                        "build": "bundle2_nvr-1-1"
                    }
                }],
                "nvr":
                "bundle2_nvr-1-1",
                "osbs_pinning":
                True,
                "pullspecs": [{
                    "new": "registry/repo/operator2@some_name_2-2-2_digest",
                    "original": "registry/repo/operator2:v2.2.0",
                    "pinned": True,
                }],
                "update": {
                    "metadata": {
                        "name": "image.1.2.4+0.1608854400.patched",
                        "substitutes-for": "1.2.4",
                    },
                    "spec": {
                        "version": "1.2.4+0.1608854400.patched"
                    },
                },
            },
        }
        self.handler._prepare_builds.assert_called_with(
            db_event, bundles_by_digest, {
                'bundle_with_related_images_1_digest',
                'bundle_with_related_images_2_digest'
            })