def _get_shipped_images(runtime, brew_session):
    # retrieve all image builds ever shipped for this version (potential operands)
    tag = f"{runtime.branch}-container-released"
    tags = {tag, tag.replace('-rhel-7-', '-rhel-8-')}  # one or two tags, depending on whether the branch name contains '-rhel-7-'
    released = brew.get_tagged_builds(tags, build_type='image', event=None, session=brew_session)
    released = brew.get_build_objects([b['build_id'] for b in released], session=brew_session)
    return [b for b in released if _is_image(b)]  # filter out source images
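`_is_image` isn't defined on this page. A hedged sketch of such a filter, assuming container builds carry an `extra.image` dict and source-container builds mark themselves with `sources_for_nvr` (both are assumptions about Brew metadata, not verbatim elliott code):

def _is_image(build):
    image_meta = (build.get('extra') or {}).get('image')
    if not isinstance(image_meta, dict):
        return False  # not a container build at all
    # Source container builds record the NVR they provide sources for;
    # exclude them, matching the "filter out source images" comment above.
    return 'sources_for_nvr' not in image_meta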
Example 2
def get_golang_container_nvrs(nvrs, logger):
    all_build_objs = brew.get_build_objects(
        ['{}-{}-{}'.format(*n) for n in nvrs])
    go_container_nvrs = {}
    for build in all_build_objs:
        go_version = 'N/A'
        nvr = (build['name'], build['version'], build['release'])
        name = nvr[0]
        if 'golang-builder' in name or 'go-toolset' in name:
            go_version = golang_builder_version(nvr, logger)
            go_container_nvrs[name] = {'nvr': nvr, 'go': go_version}
            continue

        try:
            parents = build['extra']['image']['parent_image_builds']
        except KeyError:
            logger.debug(f'Could not find parent build image for {nvr}')
            continue

        for p, pinfo in parents.items():
            if 'builder' in p or 'go-toolset' in p:
                go_version = pinfo.get('nvr')

        go_container_nvrs[name] = {'nvr': nvr, 'go': go_version}
        if not go_version or go_version == 'N/A':
            logger.debug(f'Could not find parent Go builder image for {nvr}')

    return go_container_nvrs
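A hypothetical call, to make the input and output shapes concrete. The NVR tuples below are taken from the fixture data in Example 10; `golang_builder_version` (used above for builder images themselves) is assumed to be defined elsewhere:

import logging

logger = logging.getLogger(__name__)
nvrs = [
    # (name, version, release) tuples, e.g. parsed from Brew NVR strings
    ('openshift-enterprise-base-container', 'v4.0', '201908250221'),
    ('logging-fluentd-container', 'v4.1.15', '201909041605'),
]
result = get_golang_container_nvrs(nvrs, logger)
# result: {name: {'nvr': (n, v, r), 'go': <builder NVR / version, or 'N/A'>}, ...}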
Example 3
def _fetch_nvrps_by_nvr_or_id(ids_or_nvrs,
                              tag_pv_map,
                              ignore_product_version=False):
    session = koji.ClientSession(constants.BREW_HUB)
    builds = brew.get_build_objects(ids_or_nvrs, session)
    nonexistent_builds = list(
        filter(lambda b: b[1] is None, zip(ids_or_nvrs, builds)))
    if nonexistent_builds:
        raise ValueError(
            f"The following builds were not found in Brew: {' '.join(str(b[0]) for b in nonexistent_builds)}"
        )
    nvrps = []
    if ignore_product_version:
        for build in builds:
            nvrps.append(
                (build["name"], build["version"], build["release"], None))
        return nvrps
    for build, tags in zip(builds, brew.get_builds_tags(builds, session)):
        tag_names = {tag["name"] for tag in tags}
        product_versions = [
            pv for tag, pv in tag_pv_map.items() if tag in tag_names
        ]
        if not product_versions:
            raise ValueError(
                f"Build {build['nvr']} doesn't have any of the following whitelisted tags: {list(tag_pv_map.keys())}"
            )
        for pv in product_versions:
            nvrps.append(
                (build["name"], build["version"], build["release"], pv))
    return nvrps
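A sketch of how this might be called. The tag names below are hypothetical; the product versions and the buildah NVR are borrowed from Example 13, and in practice the mapping comes from ocp-build-data's erratatool config:

tag_pv_map = {
    'rhaos-4.3-rhel-7-candidate': 'RHEL-7-OSE-4.3',  # hypothetical tag names
    'rhaos-4.3-rhel-8-candidate': 'OSE-4.4-RHEL-8',
}
nvrps = _fetch_nvrps_by_nvr_or_id(['buildah-1.11.6-6.rhaos4.3.el8'], tag_pv_map)
# Each entry is (name, version, release, product_version); with
# ignore_product_version=True the last slot is None instead.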
Example 4
def get_parent_build_ids(builds):
    parents = []
    for b in builds:
        if b.get("extra") is None:
            b = brew.get_build_objects([b["id"]])[0]
        parent = b["extra"]["image"].get("parent_build_id")
        parents.append(parent)
    return parents
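Since the returned list is parallel to the input (with None where a build has no recorded parent), this composes with `get_build_objects` to walk one level up the parent chain; a small sketch using a build from the Example 10 fixture below:

builds = brew.get_build_objects(['logging-fluentd-container-v4.1.15-201909041605'])
parent_ids = get_parent_build_ids(builds)   # -> [961131] per the fixture data below
parent_builds = brew.get_build_objects([i for i in parent_ids if i is not None])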
Example 5
def _fetch_builds_by_nvr_or_id(ids_or_nvrs, tag_pv_map):
    session = koji.ClientSession(constants.BREW_HUB)
    builds = brew.get_build_objects(ids_or_nvrs, session)
    nvrps = []
    for index, tags in enumerate(brew.get_builds_tags(builds, session)):
        build = builds[index]  # type: dict
        tag_names = {tag["name"] for tag in tags}
        for tag, prod_version in tag_pv_map.items():
            if tag in tag_names:
                nvrps.append((build["name"], build["version"],
                              build["release"], prod_version))
    return nvrps
Example 6
def _get_attached_image_builds(brew_session, advisories):
    # get all attached image builds
    build_nvrs = []
    try:
        for advisory in advisories:
            green_print(f"Retrieving builds from advisory {advisory}")
            advisory = Erratum(errata_id=advisory)
            for build_list in advisory.errata_builds.values():  # one per product version
                build_nvrs.extend(build_list)
    except GSSError:
        exit_unauthenticated()

    green_print(f"Found {len(build_nvrs)} builds")
    return [build for build in brew.get_build_objects(build_nvrs, brew_session) if _is_image(build)]
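A hypothetical invocation, reusing advisory numbers that appear in Example 12; this requires an authenticated Errata Tool session, which is why `GSSError` is handled above:

brew_session = koji.ClientSession(constants.BREW_HUB)
image_builds = _get_attached_image_builds(brew_session, [45606, 45527, 46049])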
Example 7
def get_container_golang_versions(advisory_id: str):
    all_builds = errata.get_brew_builds(advisory_id)

    all_build_objs = brew.get_build_objects([b.nvr for b in all_builds])
    for build in all_build_objs:
        golang_version = None
        name = build.get('name')
        try:
            parents = build['extra']['image']['parent_image_builds']
        except KeyError:
            print('Could not get parent image info for {}'.format(name))
            continue

        for p, pinfo in parents.items():
            if 'builder' in p:
                golang_version = pinfo.get('nvr')

        if golang_version is not None:
            print('{}:\t{}'.format(name, golang_version))
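Unlike Example 2, this variant prints its findings instead of returning them; a hypothetical invocation:

# Advisory 45606 is borrowed from Example 12; output goes to stdout,
# one '<name>:\t<golang builder NVR>' line per image with a builder parent.
get_container_golang_versions('45606')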
Example 8
def _get_builds(self, ids_or_nvrs: Iterable[Union[int, str]]) -> List[Dict]:
    """ Get build dicts from Brew. This method uses an internal cache to avoid unnecessary queries.
    :param ids_or_nvrs: list of build IDs or NVRs
    :return: a list of Brew build dicts
    """
    cache_miss = set(ids_or_nvrs) - self._build_cache.keys()
    if cache_miss:
        cache_miss = [
            strip_epoch(item) if isinstance(item, str) else item
            for item in cache_miss
        ]
        builds = get_build_objects(cache_miss, self._koji_api)
        for id_or_nvre, build in zip(cache_miss, builds):
            if build:
                self._cache_build(build)
            else:
                # None indicates the build ID or NVRE doesn't exist
                self._build_cache[id_or_nvre] = None
    return [self._build_cache[id_or_nvre] for id_or_nvre in ids_or_nvrs]
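For context, a minimal sketch of the surrounding cache class. The `strip_epoch` and `_cache_build` helpers are inferred from how the method uses them and are assumptions, not the actual elliott code:

from typing import Dict, Iterable, List, Optional, Union

def strip_epoch(nvr: str) -> str:
    # Hypothetical helper: drop an epoch from an NVR string,
    # e.g. 'foo-1:2.3-4' -> 'foo-2.3-4'. No-op if there is no epoch.
    name, version, release = nvr.rsplit('-', 2)
    return f"{name}-{version.split(':', 1)[-1]}-{release}"

class BuildFinder:
    def __init__(self, koji_api):
        self._koji_api = koji_api
        self._build_cache: Dict[Union[int, str], Optional[Dict]] = {}

    def _cache_build(self, build: Dict):
        # Cache under both ID and NVR so later lookups hit regardless of key kind.
        self._build_cache[build["id"]] = build
        self._build_cache[build["nvr"]] = build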
Example 9
def verify_cvp_cli(runtime: Runtime, all_images, nvrs, optional_checks,
                   all_optional_checks, fix, message):
    """ Verify CVP test results

    Example 1: Verify CVP test results for all latest 4.4 image builds, also warn those with failed content_set_check

    $ elliott --group openshift-4.4 verify-cvp --all --include-optional-check content_set_check

    Example 2: Apply patches to ocp-build-data to fix the redundant content sets error:

    $ elliott --group openshift-4.4 verify-cvp --all --include-optional-check content_set_check --fix

    Note:
    1. If `--message` is not given, `--fix` will leave changed ocp-build-data files uncommitted.
    2. Make sure your ocp-build-data directory is clean before running `--fix`.
    """
    if bool(all_images) + bool(nvrs) != 1:
        raise click.BadParameter('You must use one of --all or --build.')
    if all_optional_checks and optional_checks:
        raise click.BadParameter(
            'Use only one of --all-optional-checks or --include-optional-check.'
        )

    runtime.initialize(mode='images')
    tag_pv_map = runtime.gitdata.load_data(
        key='erratatool',
        replace_vars=runtime.group_config.vars.primitive()
        if runtime.group_config.vars else
        {}).data.get('brew_tag_product_version_mapping')
    brew_session = koji.ClientSession(runtime.group_config.urls.brewhub
                                      or constants.BREW_HUB)

    builds = []
    if all_images:
        runtime.logger.info("Getting latest image builds from Brew...")
        builds = get_latest_image_builds(brew_session, tag_pv_map.keys(),
                                         runtime.image_metas)
    elif nvrs:
        runtime.logger.info(f"Finding {len(builds)} builds from Brew...")
        builds = brew.get_build_objects(nvrs, brew_session)
    runtime.logger.info(f"Found {len(builds)} image builds.")

    resultsdb_api = ResultsDBAPI()
    nvrs = [b["nvr"] for b in builds]
    runtime.logger.info(
        f"Getting CVP test results for {len(builds)} image builds...")
    latest_cvp_results = get_latest_cvp_results(runtime, resultsdb_api, nvrs)

    # print a summary for all CVP results
    good_results = []  # good means PASSED or INFO
    bad_results = []  # bad means NEEDS_INSPECTION or FAILED
    incomplete_nvrs = []
    for nvr, result in zip(nvrs, latest_cvp_results):
        if not result:
            incomplete_nvrs.append(nvr)
            continue
        outcome = result.get(
            "outcome"
        )  # only PASSED, FAILED, INFO, NEEDS_INSPECTION are now valid outcome values (https://resultsdb20.docs.apiary.io/#introduction/changes-since-1.0)
        if outcome in {"PASSED", "INFO"}:
            good_results.append(result)
        elif outcome in {"NEEDS_INSPECTION", "FAILED"}:
            bad_results.append(result)
    green_prefix("good: {}".format(len(good_results)))
    click.echo(", ", nl=False)
    red_prefix("bad: {}".format(len(bad_results)))
    click.echo(", ", nl=False)
    yellow_print("incomplete: {}".format(len(incomplete_nvrs)))

    if bad_results:
        red_print("The following builds didn't pass CVP tests:")
        for r in bad_results:
            nvr = r["data"]["item"][0]
            red_print(f"{nvr} {r['outcome']}: {r['ref_url']}")

    if incomplete_nvrs:
        yellow_print(
            "We couldn't find CVP test results for the following builds:")
        for nvr in incomplete_nvrs:
            yellow_print(nvr)

    if not optional_checks and not all_optional_checks:
        return  # no need to print failed optional CVP checks
    # Find failed optional CVP checks in case some of them *will* become required.
    optional_checks = set(optional_checks)
    complete_results = good_results + bad_results
    runtime.logger.info(
        f"Getting optional checks for {len(complete_results)} CVP tests...")
    optional_check_results = get_optional_checks(runtime, complete_results)

    component_distgit_keys = {}  # a dict of brew component names to distgit keys
    content_set_repo_names = {}  # a map of x86_64 content set names to group.yml repo names
    if fix:  # Fixing redundant content sets requires those dicts
        for image in runtime.image_metas():
            component_distgit_keys[
                image.get_component_name()] = image.distgit_key
        for repo_name, repo_info in runtime.group_config.get("repos",
                                                             {}).items():
            content_set_name = repo_info.get(
                'content_set', {}).get('x86_64') or repo_info.get(
                    'content_set', {}).get('default')
            if content_set_name:
                content_set_repo_names[content_set_name] = repo_name

    ocp_build_data_updated = False

    for cvp_result, checks in zip(complete_results, optional_check_results):
        # example optional checks: http://external-ci-coldstorage.datahub.redhat.com/cvp/cvp-product-test/hive-container-v4.6.0-202008010302.p0/da01e36c-8c69-4a19-be7d-ba4593a7b085/sanity-tests-optional-results.json
        bad_checks = [
            check for check in checks["checks"]
            if check["status"] != "PASS" and (
                all_optional_checks or check["name"] in optional_checks)
        ]
        if not bad_checks:
            continue
        nvr = cvp_result["data"]["item"][0]
        yellow_print("----------")
        yellow_print(
            f"Build {nvr} has {len(bad_checks)} problematic CVP optional checks:"
        )
        for check in bad_checks:
            yellow_print(f"* {check['name']} {check['status']}")
            if fix and check["name"] == "content_set_check":
                if "Some content sets are redundant." in check["logs"]:
                    # fix redundant content sets
                    name = nvr.rsplit('-', 2)[0]
                    distgit_keys = component_distgit_keys.get(name)
                    if not distgit_keys:
                        runtime.logger.warning(
                            f"Will not apply the redundant content sets fix to image {name}: We don't know its distgit key."
                        )
                        continue
                    amd64_content_sets = list(
                        filter(lambda item: item.get("arch") == "amd64",
                               check["logs"][-1])
                    )  # seems only x86_64 (amd64) content sets are defined in ocp-build-data.
                    if not amd64_content_sets:
                        runtime.logger.warning(
                            f"Will not apply the redundant content sets fix to image {name}: It doesn't have redundant x86_64 (amd64) content sets"
                        )
                        continue
                    amd64_redundant_cs = amd64_content_sets[0]["redundant_cs"]
                    redundant_repos = [
                        content_set_repo_names[cs] for cs in amd64_redundant_cs
                        if cs in content_set_repo_names
                    ]
                    if len(redundant_repos) != len(amd64_redundant_cs):
                        runtime.logger.error(
                            f"Not all content sets have a repo entry in group.yml: #content_sets is {len(amd64_redundant_cs)}, #repos is {len(redundant_repos)}"
                        )
                    runtime.logger.info(
                        f"Applying redundant content sets fix to {distgit_keys}..."
                    )
                    fix_redundant_content_set(runtime, distgit_keys,
                                              redundant_repos)
                    ocp_build_data_updated = True
                    runtime.logger.info(
                        f"Fixed redundant content sets for {distgit_keys}")
        yellow_print(
            f"See {cvp_result['ref_url']}sanity-tests-optional-results.json for more details."
        )

    if message and ocp_build_data_updated:
        runtime.gitdata.commit(message)
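The outcome triage in the middle of this command is self-contained enough to factor out; a minimal standalone sketch of the same partitioning (the `results` shape follows the ResultsDB responses used above):

GOOD_OUTCOMES = {"PASSED", "INFO"}
BAD_OUTCOMES = {"NEEDS_INSPECTION", "FAILED"}

def triage_cvp_results(nvrs, results):
    """Partition NVRs/results into good, bad, and incomplete buckets.
    results[i] is the latest ResultsDB result dict for nvrs[i], or None if missing."""
    good, bad, incomplete = [], [], []
    for nvr, result in zip(nvrs, results):
        if not result:
            incomplete.append(nvr)
        elif result.get("outcome") in GOOD_OUTCOMES:
            good.append(result)
        elif result.get("outcome") in BAD_OUTCOMES:
            bad.append(result)
    return good, bad, incomplete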
Example 10
    def test_get_build_objects(self):
        build_infos = {
            "logging-fluentd-container-v3.11.141-2": {
                "cg_id": None,
                "package_name": "logging-fluentd-container",
                "extra": {
                    "submitter": "osbs",
                    "image": {
                        "media_types": [
                            "application/vnd.docker.distribution.manifest.list.v2+json",
                            "application/vnd.docker.distribution.manifest.v1+json",
                            "application/vnd.docker.distribution.manifest.v2+json"
                        ],
                        "help":
                        None,
                        "index": {
                            "pull": [
                                "brew-pulp-docker01.web.prod.ext.phx2.redhat.com:8888/openshift3/ose-logging-fluentd@sha256:1df5eacdd98923590afdc85330aaac0488de96e991b24a7f4cb60113b7a66e80",
                                "brew-pulp-docker01.web.prod.ext.phx2.redhat.com:8888/openshift3/ose-logging-fluentd:v3.11.141-2"
                            ],
                            "digests": {
                                "application/vnd.docker.distribution.manifest.list.v2+json":
                                "sha256:1df5eacdd98923590afdc85330aaac0488de96e991b24a7f4cb60113b7a66e80"
                            },
                            "tags": ["v3.11.141-2"]
                        },
                        "autorebuild":
                        False,
                        "isolated":
                        False,
                        "yum_repourls": [
                            "http://pkgs.devel.redhat.com/cgit/containers/logging-fluentd/plain/.oit/signed.repo?h=rhaos-3.11-rhel-7"
                        ],
                        "parent_build_id":
                        955726,
                        "parent_images": ["openshift/ose-base:rhel7"],
                        "parent_image_builds": {
                            "openshift/ose-base:rhel7": {
                                "id":
                                955726,
                                "nvr":
                                "openshift-enterprise-base-container-v4.0-201908250221"
                            }
                        }
                    },
                    "container_koji_task_id": 23188768
                },
                "creation_time": "2019-08-26 07:34:32.613833",
                "completion_time": "2019-08-26 07:34:31",
                "package_id": 67151,
                "cg_name": None,
                "id": 956245,
                "build_id": 956245,
                "epoch": None,
                "source":
                "git://pkgs.devel.redhat.com/containers/logging-fluentd#7f4bcdc798fd72414a29dc1010c448e1ed52f591",
                "state": 1,
                "version": "v3.11.141",
                "completion_ts": 1566804871.0,
                "owner_id": 4078,
                "owner_name": "ocp-build/buildvm.openshift.eng.bos.redhat.com",
                "nvr": "logging-fluentd-container-v3.11.141-2",
                "start_time": "2019-08-26 07:03:41",
                "creation_event_id": 26029088,
                "start_ts": 1566803021.0,
                "volume_id": 0,
                "creation_ts": 1566804872.61383,
                "name": "logging-fluentd-container",
                "task_id": None,
                "volume_name": "DEFAULT",
                "release": "2"
            },
            "logging-fluentd-container-v4.1.14-201908291507": {
                "cg_id": None,
                "package_name": "logging-fluentd-container",
                "extra": {
                    "submitter": "osbs",
                    "image": {
                        "media_types": [
                            "application/vnd.docker.distribution.manifest.list.v2+json",
                            "application/vnd.docker.distribution.manifest.v1+json",
                            "application/vnd.docker.distribution.manifest.v2+json"
                        ],
                        "help":
                        None,
                        "index": {
                            "unique_tags": [
                                "rhaos-4.1-rhel-7-containers-candidate-94076-20190829211225"
                            ],
                            "pull": [
                                "brew-pulp-docker01.web.prod.ext.phx2.redhat.com:8888/openshift/ose-logging-fluentd@sha256:7503f828aaf80e04b2aaab0b88626b97a20e5600ba75fef8b764e02cc1164a7c",
                                "brew-pulp-docker01.web.prod.ext.phx2.redhat.com:8888/openshift/ose-logging-fluentd:v4.1.14-201908291507"
                            ],
                            "floating_tags": [
                                "latest", "v4.1.14", "v4.1.14.20190829.150756",
                                "v4.1"
                            ],
                            "digests": {
                                "application/vnd.docker.distribution.manifest.list.v2+json":
                                "sha256:7503f828aaf80e04b2aaab0b88626b97a20e5600ba75fef8b764e02cc1164a7c"
                            },
                            "tags": ["v4.1.14-201908291507"]
                        },
                        "autorebuild":
                        False,
                        "isolated":
                        False,
                        "yum_repourls": [
                            "http://pkgs.devel.redhat.com/cgit/containers/logging-fluentd/plain/.oit/signed.repo?h=rhaos-4.1-rhel-7"
                        ],
                        "parent_build_id":
                        958278,
                        "parent_images": [
                            "rhscl/ruby-25-rhel7:latest",
                            "openshift/ose-base:ubi7"
                        ],
                        "parent_image_builds": {
                            "openshift/ose-base:ubi7": {
                                "id":
                                958278,
                                "nvr":
                                "openshift-enterprise-base-container-v4.0-201908290538"
                            },
                            "rhscl/ruby-25-rhel7:latest": {
                                "id": 957642,
                                "nvr": "rh-ruby25-container-2.5-50"
                            }
                        }
                    },
                    "container_koji_task_id": 23241046
                },
                "creation_time": "2019-08-29 21:42:46.062037",
                "completion_time": "2019-08-29 21:42:44",
                "package_id": 67151,
                "cg_name": None,
                "id": 958765,
                "build_id": 958765,
                "epoch": None,
                "source":
                "git://pkgs.devel.redhat.com/containers/logging-fluentd#ecac10b38f035ea2f9ea62b9efa63c051667ebbb",
                "state": 1,
                "version": "v4.1.14",
                "completion_ts": 1567114964.0,
                "owner_id": 4078,
                "owner_name": "ocp-build/buildvm.openshift.eng.bos.redhat.com",
                "nvr": "logging-fluentd-container-v4.1.14-201908291507",
                "start_time": "2019-08-29 21:12:51",
                "creation_event_id": 26063093,
                "start_ts": 1567113171.0,
                "volume_id": 0,
                "creation_ts": 1567114966.06204,
                "name": "logging-fluentd-container",
                "task_id": None,
                "volume_name": "DEFAULT",
                "release": "201908291507"
            },
            "logging-fluentd-container-v4.1.15-201909041605": {
                "cg_id": None,
                "package_name": "logging-fluentd-container",
                "extra": {
                    "submitter": "osbs",
                    "image": {
                        "media_types": [
                            "application/vnd.docker.distribution.manifest.list.v2+json",
                            "application/vnd.docker.distribution.manifest.v1+json",
                            "application/vnd.docker.distribution.manifest.v2+json"
                        ],
                        "help":
                        None,
                        "index": {
                            "unique_tags": [
                                "rhaos-4.1-rhel-7-containers-candidate-96970-20190904214308"
                            ],
                            "pull": [
                                "brew-pulp-docker01.web.prod.ext.phx2.redhat.com:8888/openshift/ose-logging-fluentd@sha256:1ce1555b58982a29354c293948ee6c788743a08f39a0c530be791cb9bdaf4189",
                                "brew-pulp-docker01.web.prod.ext.phx2.redhat.com:8888/openshift/ose-logging-fluentd:v4.1.15-201909041605"
                            ],
                            "floating_tags": [
                                "latest", "v4.1.15", "v4.1",
                                "v4.1.15.20190904.160545"
                            ],
                            "digests": {
                                "application/vnd.docker.distribution.manifest.list.v2+json":
                                "sha256:1ce1555b58982a29354c293948ee6c788743a08f39a0c530be791cb9bdaf4189"
                            },
                            "tags": ["v4.1.15-201909041605"]
                        },
                        "autorebuild":
                        False,
                        "isolated":
                        False,
                        "yum_repourls": [
                            "http://pkgs.devel.redhat.com/cgit/containers/logging-fluentd/plain/.oit/signed.repo?h=rhaos-4.1-rhel-7"
                        ],
                        "parent_build_id":
                        961131,
                        "parent_images": [
                            "rhscl/ruby-25-rhel7:latest",
                            "openshift/ose-base:ubi7"
                        ],
                        "parent_image_builds": {
                            "openshift/ose-base:ubi7": {
                                "id":
                                961131,
                                "nvr":
                                "openshift-enterprise-base-container-v4.0-201909040323"
                            },
                            "rhscl/ruby-25-rhel7:latest": {
                                "id": 957642,
                                "nvr": "rh-ruby25-container-2.5-50"
                            }
                        }
                    },
                    "container_koji_task_id": 23365465
                },
                "creation_time": "2019-09-04 22:17:36.432110",
                "completion_time": "2019-09-04 22:17:35",
                "package_id": 67151,
                "cg_name": None,
                "id": 962144,
                "build_id": 962144,
                "epoch": None,
                "source":
                "git://pkgs.devel.redhat.com/containers/logging-fluentd#31cf3d4264dabb8892fb4b5921e5ff4d5d0ab2de",
                "state": 1,
                "version": "v4.1.15",
                "completion_ts": 1567635455.0,
                "owner_id": 4078,
                "owner_name": "ocp-build/buildvm.openshift.eng.bos.redhat.com",
                "nvr": "logging-fluentd-container-v4.1.15-201909041605",
                "start_time": "2019-09-04 21:43:32",
                "creation_event_id": 26176078,
                "start_ts": 1567633412.0,
                "volume_id": 0,
                "creation_ts": 1567635456.43211,
                "name": "logging-fluentd-container",
                "task_id": None,
                "volume_name": "DEFAULT",
                "release": "201909041605"
            },
        }

        def fake_get_build(nvr):
            return mock.MagicMock(result=build_infos[nvr])

        fake_session = mock.MagicMock()
        fake_context_manager = fake_session.multicall.return_value.__enter__.return_value
        fake_context_manager.getBuild.side_effect = fake_get_build
        expected = list(build_infos.values())
        actual = brew.get_build_objects(build_infos.keys(), fake_session)
        self.assertListEqual(actual, expected)
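This test pins down the contract of `brew.get_build_objects`: one koji multicall, one `getBuild` call per ID/NVR, results returned in input order. A minimal implementation sketch consistent with the mock (the actual elliott code may differ in details such as error handling):

def get_build_objects(ids_or_nvrs, session):
    # Queue one getBuild per identifier inside a single multicall batch;
    # the VirtualCall results are populated when the context manager exits.
    with session.multicall() as m:
        tasks = [m.getBuild(build) for build in ids_or_nvrs]
    return [task.result for task in tasks]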
Example 11
async def verify_cvp_cli(runtime: Runtime, all_images, nvrs, optional_checks, all_optional_checks, fix, message):
    """ Verify CVP test results

    Example 1: Verify CVP test results for all latest 4.4 image builds, also warn those with failed content_set_check

    $ elliott --group openshift-4.4 verify-cvp --all --include-optional-check content_set_check

    Example 2: Apply patches to ocp-build-data to fix the redundant content sets error:

    $ elliott --group openshift-4.4 verify-cvp --all --include-optional-check content_set_check --fix

    Note:
    1. If `--message` is not given, `--fix` will leave changed ocp-build-data files uncommitted.
    2. Make sure your ocp-build-data directory is clean before running `--fix`.
    """
    if bool(all_images) + bool(nvrs) != 1:
        raise click.BadParameter('You must use one of --all or --build.')
    if all_optional_checks and optional_checks:
        raise click.BadParameter('Use only one of --all-optional-checks or --include-optional-check.')

    runtime.initialize(mode='images')
    brew_session = koji.ClientSession(runtime.group_config.urls.brewhub or constants.BREW_HUB)

    builds = []
    if all_images:
        image_metas = runtime.image_metas()
        builds = await get_latest_image_builds(image_metas)
    elif nvrs:
        runtime.logger.info(f"Finding {len(builds)} builds from Brew...")
        builds = brew.get_build_objects(nvrs, brew_session)
    runtime.logger.info(f"Found {len(builds)} image builds.")

    resultsdb_api = ResultsDBAPI()
    nvrs = [b["nvr"] for b in builds]
    runtime.logger.info(f"Getting CVP test results for {len(builds)} image builds...")
    latest_cvp_results = await get_latest_cvp_results(runtime, resultsdb_api, nvrs)

    # print a summary for all CVP results
    good_results = []  # good means PASSED or INFO
    bad_results = []  # bad means NEEDS_INSPECTION or FAILED
    incomplete_nvrs = []
    for nvr, result in zip(nvrs, latest_cvp_results):
        if not result:
            incomplete_nvrs.append(nvr)
            continue
        outcome = result.get("outcome")  # only PASSED, FAILED, INFO, NEEDS_INSPECTION are now valid outcome values (https://resultsdb20.docs.apiary.io/#introduction/changes-since-1.0)
        if outcome in {"PASSED", "INFO"}:
            good_results.append(result)
        elif outcome in {"NEEDS_INSPECTION", "FAILED"}:
            bad_results.append(result)
    green_prefix("good: {}".format(len(good_results)))
    click.echo(", ", nl=False)
    red_prefix("bad: {}".format(len(bad_results)))
    click.echo(", ", nl=False)
    yellow_print("incomplete: {}".format(len(incomplete_nvrs)))

    if bad_results:
        red_print("The following builds didn't pass CVP tests:")
        for r in bad_results:
            nvr = r["data"]["item"][0]
            red_print(f"{nvr} {r['outcome']}: {r['ref_url']}")

    if incomplete_nvrs:
        yellow_print("We couldn't find CVP test results for the following builds:")
        for nvr in incomplete_nvrs:
            yellow_print(nvr)

    if not optional_checks and not all_optional_checks:
        return  # no need to print failed optional CVP checks
    # Find failed optional CVP checks in case some of them *will* become required.
    optional_checks = set(optional_checks)
    complete_results = good_results + bad_results
    runtime.logger.info(f"Getting optional checks for {len(complete_results)} CVP tests...")
    optional_check_results = await get_optional_checks(runtime, complete_results)

    component_distgit_keys = {}  # a dict of brew component names to distgit keys
    content_set_to_repo_names = {}  # a map of content set names to group.yml repo names
    for image in runtime.image_metas():
        component_distgit_keys[image.get_component_name()] = image.distgit_key
    for repo_name, repo_info in runtime.group_config.get("repos", {}).items():
        for arch, cs_name in repo_info.get('content_set', {}).items():
            if arch == "optional":
                continue  # not a real arch name
            content_set_to_repo_names[cs_name] = repo_name

    nvr_to_builds = {build["nvr"]: build for build in builds}

    ocp_build_data_updated = False

    failed_with_not_covered_rpms = set()
    failed_with_redundant_repos = set()
    only_failed_in_non_x86_with_not_covered_rpms = set()
    only_failed_in_non_x86_with_redundant_repos = set()

    for cvp_result, checks in zip(complete_results, optional_check_results):
        # example optional checks: http://external-ci-coldstorage.datahub.redhat.com/cvp/cvp-product-test/hive-container-v4.6.0-202008010302.p0/da01e36c-8c69-4a19-be7d-ba4593a7b085/sanity-tests-optional-results.json
        bad_checks = [check for check in checks["checks"] if check["status"] != "PASS" and (all_optional_checks or check["name"] in optional_checks)]
        if not bad_checks:
            continue
        nvr = cvp_result["data"]["item"][0]
        build = nvr_to_builds[nvr]
        yellow_print("----------")
        yellow_print(f"Build {nvr} (https://brewweb.engineering.redhat.com/brew/buildinfo?buildID={nvr_to_builds[nvr]['id']}) has {len(bad_checks)} problematic CVP optional checks:")
        for check in bad_checks:
            yellow_print(f"* {check['name']} {check['status']}")
            try:
                amd64_result = list(filter(lambda item: item.get("arch") == "amd64", check["logs"][-1]))
            except AttributeError:
                red_print("CVP result malformed.")
                continue  # amd64_result would be unbound below
            if len(amd64_result) != 1:
                red_print("WHAT?! This build doesn't include an amd64 image? This shouldn't happen. Check Brew and CVP logs with the CVP team!")
                continue
            amd64_result = amd64_result[0]
            image_component_name = nvr.rsplit('-', 2)[0]
            distgit_key = component_distgit_keys.get(image_component_name)

            amd64_redundant_cs = amd64_result.get("redundant_cs", [])
            amd64_redundant_repos = {content_set_to_repo_names[cs] for cs in amd64_redundant_cs}

            def _strip_arch_suffix(rpm):
                # rh-nodejs10-3.2-3.el7.x86_64 -> rh-nodejs10-3.2-3.el7
                rpm_split = rpm.rsplit(".", 1)
                return rpm_split[0]

            amd64_not_covered_rpms = {_strip_arch_suffix(rpm) for rpm in amd64_result.get("not_covered_rpms", [])}

            if check["name"] == "content_set_check":
                details = check["logs"][-1]  # example: http://external-ci-coldstorage.datahub.redhat.com/cvp/cvp-product-test/logging-fluentd-container-v4.6.0-202008261251.p0/dd9f2024-5440-4f33-b508-472ccf258439/sanity-tests-optional-results.json
                if not details:
                    red_print("content_set_check failed without any explanation. Report to CVP team!")
                    continue
                if len(details) > 1:  # if this build is multi-arch, check if all per-arch results are consistent
                    for result in details:
                        if result["arch"] == "amd64":
                            continue
                        redundant_repos = {content_set_to_repo_names[cs] for cs in result.get("redundant_cs", [])}
                        if redundant_repos != amd64_redundant_repos:
                            only_failed_in_non_x86_with_redundant_repos.add(nvr)
                            red_print(f"""content_set_check for {nvr} arch {result["arch"]} has different redundant_cs result from the one for amd64:
                            {result["arch"]} has redundant_cs {result.get("redundant_cs")},
                            but amd64 has redundant_cs {amd64_redundant_cs}.
                            Not sure what happened. Please see Brew and CVP logs and/or check with the CVP team.""")
                        not_covered_rpms = {_strip_arch_suffix(rpm) for rpm in result.get("not_covered_rpms", [])}
                        if not_covered_rpms != amd64_not_covered_rpms:
                            only_failed_in_non_x86_with_not_covered_rpms.add(nvr)
                            red_print(f"""content_set_check for {nvr} arch {result["arch"]} has different not_covered_rpms result from the one for amd64:
                            {result["arch"]} has extra not_covered_rpms {not_covered_rpms - amd64_not_covered_rpms},
                            and missing not_covered_rpms {amd64_not_covered_rpms - not_covered_rpms}.
                            Not sure what happened. Check Brew and CVP logs with the CVP team!""")

                if amd64_not_covered_rpms:  # This build has not_covered_rpms
                    failed_with_not_covered_rpms.add(nvr)
                    yellow_print(f"Image {distgit_key} has not_covered_rpms: {amd64_not_covered_rpms}")
                    brew_repos = await find_repos_for_rpms(amd64_not_covered_rpms, build)
                    yellow_print(f"Those repos shown in Brew logs might be a good hint: {brew_repos}")
                    runtime.logger.info("Looking for parent image's content_sets...")
                    parent = get_parent_build_ids([build])[0]
                    if parent:
                        parent_build = brew.get_build_objects([parent])[0]
                        parent_cs = await get_content_sets_for_build(parent_build)
                        parent_enabled_repos = {content_set_to_repo_names[cs] for cs in parent_cs.get("x86_64", [])}
                        enabled_repos = set(runtime.image_map[distgit_key].config.get("enabled_repos", []))
                        missing_repos = parent_enabled_repos - enabled_repos
                        yellow_print(f"""The following repos are defined in parent {parent_build["nvr"]} {component_distgit_keys.get(parent_build["name"], "?")}.yml but not in
                                     {component_distgit_keys[build["name"]]}.yml: {missing_repos}""")
                        if fix and missing_repos:
                            runtime.logger.info("Trying to merge parent image's content_sets...")
                            fix_missing_content_set(runtime, distgit_key, missing_repos)
                            ocp_build_data_updated = True
                            runtime.logger.info(f"{distgit_key}.yml patched")

                if amd64_redundant_repos:  # This build has redundant_cs
                    failed_with_redundant_repos.add(nvr)
                    yellow_print(f"Image {distgit_key} has redundant repos: {amd64_redundant_repos}")
                    if not fix:
                        yellow_print(f"Please add the following repos to non_shipping_repos in {distgit_key}.yml: {amd64_redundant_repos}")
                    else:
                        runtime.logger.info(f"Applying redundant content sets fix to {distgit_key}.yml...")
                        fix_redundant_content_set(runtime, distgit_key, amd64_redundant_repos)
                        ocp_build_data_updated = True
                        runtime.logger.info(f"{distgit_key}.yml patched")

        print(f"See {cvp_result['ref_url']}sanity-tests-optional-results.json for more details.")

    if failed_with_not_covered_rpms or failed_with_redundant_repos:
        yellow_print(f"{len(failed_with_not_covered_rpms | failed_with_redundant_repos)} images failed content_set checks, where:")

    if failed_with_not_covered_rpms:
        yellow_print(f"\t{len(failed_with_not_covered_rpms)} images failed the content_set check because of not_covered_rpms:")
        for nvr in failed_with_not_covered_rpms:
            line = f"\t\t{nvr}"
            if nvr in only_failed_in_non_x86_with_not_covered_rpms:
                line += " - non-x86 arches differ from x86"
            yellow_print(line)
    if failed_with_redundant_repos:
        yellow_print(f"\t{len(failed_with_redundant_repos)} images failed the content_set check because of redundant_repos:")
        for nvr in failed_with_redundant_repos:
            line = f"\t\t{nvr}"
            if nvr in only_failed_in_non_x86_with_redundant_repos:
                line += " - non-x86 arches differ from x86"
            yellow_print(line)

    if message and ocp_build_data_updated:
        runtime.gitdata.commit(message)
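As a quick reference, the NVR and RPM string conventions this command relies on (values taken from examples elsewhere on this page):

nvr = 'logging-fluentd-container-v4.6.0-202008261251.p0'
name = nvr.rsplit('-', 2)[0]         # -> 'logging-fluentd-container' (component name)
rpm = 'rh-nodejs10-3.2-3.el7.x86_64'
rpm_no_arch = rpm.rsplit('.', 1)[0]  # -> 'rh-nodejs10-3.2-3.el7' (arch suffix stripped)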
Example 12
def create(ctx, advisories, out_dir, out_layout, components, force):
    """ Create tarball sources for advisories.

    To create tarball sources for Brew component (package) logging-fluentd-container that was shipped on advisories 45606, 45527, and 46049:
    $ elliott tarball-sources create --component logging-fluentd-container --out-dir=out/ 45606 45527 46049
    """

    if not force and os.path.isdir(out_dir) and os.listdir(out_dir):
        util.red_print("Output directory {} is not empty.\n\
Use --force to add new tarball sources to an existing directory.".format(
            os.path.abspath(out_dir)))
        exit(1)
    mkdirs(out_dir)

    working_dir = os.path.join(ctx.obj.working_dir, "tarball-sources")
    LOGGER.debug("Use working directory {}.".format(
        os.path.abspath(working_dir)))
    mkdirs(working_dir)

    # `nvr_dirs` is a dict with brew build NVRs as keys, values are
    # a set of directories for the generated tarballs,
    # since a build can be attached to multiple advisories.
    # For example:
    # nvr_dirs = {
    #   "logging-fluentd-container-v3.11.141-2": {
    #     "RHOSE/RHEL-7-OSE-3.11/45606/release/"
    #   },
    #   "logging-fluentd-container-v4.1.14-201908291507": {
    #     "RHOSE/RHEL-7-OSE-4.1/45527/release/"
    #   },
    #   "logging-fluentd-container-v4.1.15-201909041605": {
    #     "RHOSE/RHEL-7-OSE-4.1/46049/release/"
    #   }
    # }
    nvr_dirs = {}  # type: Dict[str, Set[str]]

    # Getting build NVRs for specified Koji/Brew components from advisories.
    # NOTE This is SLOW. However, doing this in parallel doesn't work due to a
    # race condition in the implementation of `errata_tool.Erratum`'s parent class ErrataConnector.
    for advisory in advisories:
        click.echo("Finding builds from advisory {}...".format(advisory))
        builds = tarball_sources.find_builds_from_advisory(
            advisory, components)
        if not builds:
            util.yellow_print(
                "No matching builds found in advisory {}. Wrong advisory number?"
                .format(advisory))
            continue
        util.green_print("Found {} matching build(s) in advisory {}".format(
            len(builds), advisory))
        for nvr, product, product_version in builds:
            util.green_print("\t{}\t{}\t{}".format(nvr, product,
                                                   product_version))

        for nvr, product, product_version in builds:
            if nvr not in nvr_dirs:
                nvr_dirs[nvr] = set()
            if out_layout == "flat":
                nvr_dirs[nvr].add(out_dir)
            else:
                nvr_dirs[nvr].add(
                    os.path.join(out_dir, product_version, str(advisory),
                                 "release"))

    if not nvr_dirs:
        util.red_print(
            "Exiting because no matching builds were found in any of the specified advisories.")
        exit(1)

    # Check build infos from Koji/Brew
    # in order to figure out the source Git repo and commit hash for each build.
    click.echo("Fetching build infos for {} from Koji/Brew...".format(
        ", ".join(nvr_dirs.keys())))
    brew_session = koji.ClientSession(constants.BREW_HUB)
    brew_builds = brew.get_build_objects(nvr_dirs.keys(), brew_session)

    # Ready to generate tarballs
    tarball_sources_list = []
    for build_info in brew_builds:
        nvr = build_info["nvr"]
        tarball_filename = nvr + ".tar.gz"
        click.echo("Generating tarball source {} for {}...".format(
            tarball_filename, nvr))

        with tempfile.NamedTemporaryFile(suffix="-" + tarball_filename,
                                         dir=working_dir) as temp_tarball:
            temp_tarball_path = temp_tarball.name
            LOGGER.debug(
                "Temporary tarball file is {}".format(temp_tarball_path))

            tarball_sources.generate_tarball_source(
                temp_tarball, nvr + "/",
                os.path.join(working_dir, "repos", build_info["name"]),
                build_info["source"])
            for dest_dir in nvr_dirs[nvr]:
                mkdirs(dest_dir)
                tarball_abspath = os.path.abspath(
                    os.path.join(dest_dir, tarball_filename))
                if os.path.exists(tarball_abspath):
                    util.yellow_print(
                        "File {} will be overwritten.".format(tarball_abspath))

                LOGGER.debug("Copying {} to {}...".format(
                    temp_tarball_path, tarball_abspath))
                shutil.copyfile(
                    temp_tarball_path,
                    tarball_abspath)  # `shutil.copyfile` uses default umask
                tarball_sources_list.append(tarball_abspath)
                util.green_print(
                    "Created tarball source {}.".format(tarball_abspath))

    print_success_message(tarball_sources_list, out_dir)
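`tarball_sources.generate_tarball_source` isn't shown here; a rough, hypothetical sketch of the idea, assuming the Brew `source` field is a `<git-url>#<commit>` string as seen in the build dicts of Example 10 (not the actual elliott implementation):

import subprocess

def generate_tarball_source_sketch(out_file, prefix, repo_dir, source):
    # source looks like 'git://pkgs.devel.redhat.com/containers/logging-fluentd#7f4bcdc...'
    url, commit = source.rsplit('#', 1)
    subprocess.run(['git', 'clone', url, repo_dir], check=True)
    # git archive can emit a gzipped tarball with every path prefixed, e.g. '<nvr>/'.
    subprocess.run(['git', '-C', repo_dir, 'archive', '--format=tar.gz',
                    f'--prefix={prefix}', '-o', out_file.name, commit], check=True)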
Example 13
def tag_builds_cli(runtime: Runtime, advisories: Tuple[int],
                   default_advisory_type: str, product_version: str,
                   builds: Tuple[str], tag: str, dont_untag: bool,
                   dry_run: bool):
    """ Tag builds into Brew tag and optionally untag unspecified builds.

    Example 1: Tag RHEL7 RPMs that on ocp-build-data recorded advisory into rhaos-4.3-rhel-7-image-build

    $ elliott --group=openshift-4.3 tag-builds --use-default-advisory rpm --product-version RHEL-7-OSE-4.3 --tag rhaos-4.3-rhel-7-image-build

    Example 2: Tag RHEL8 RPMs that are on advisory 55016 into rhaos-4.3-rhel-8-image-build

    $ elliott --group=openshift-4.3 tag-builds --advisory 55016 --product-version OSE-4.4-RHEL-8 --tag rhaos-4.3-rhel-8-image-build

    Example 3: Tag specified builds into rhaos-4.3-rhel-8-image-build

    $ elliott --group=openshift-4.3 tag-builds --build buildah-1.11.6-6.rhaos4.3.el8 --build openshift-4.3.23-202005230952.g1.b596217.el8 --tag rhaos-4.3-rhel-8-image-build
    """
    if advisories and builds:
        raise click.BadParameter('Use only one of --build or --advisory/-a.')
    if advisories and default_advisory_type:
        raise click.BadParameter(
            'Use only one of --use-default-advisory or --advisory/-a.')
    if default_advisory_type and builds:
        raise click.BadParameter(
            'Use only one of --build or --use-default-advisory.')
    if product_version and not advisories and not default_advisory_type:
        raise click.BadParameter(
            '--product-version should only be used with --use-default-advisory or --advisory/-a.'
        )

    runtime.initialize()
    logger = runtime.logger
    if default_advisory_type:
        advisories = (find_default_advisory(runtime, default_advisory_type), )

    all_builds = set()  # All Brew builds that should be in the tag

    if advisories:
        errata_session = requests.session()
        for advisory in advisories:
            logger.info(
                f"Fetching attached Brew builds from advisory {advisory}...")
            errata_builds = errata.get_builds(advisory, errata_session)
            product_versions = list(errata_builds.keys())
            logger.debug(
                f"Advisory {advisory} has builds for {len(product_versions)} product versions: {product_versions}"
            )
            if product_version:  # Only this product version should be concerned
                product_versions = [product_version]
            for pv in product_versions:
                logger.debug(f"Extract Errata builds for product version {pv}")
                nvrs = _extract_nvrs_from_errata_build_list(errata_builds, pv)
                logger.info(
                    f"Found {len(nvrs)} builds from advisory {advisory} with product version {pv}"
                )
                logger.debug(
                    f"The following builds are found for product version {pv}:\n\t{list(nvrs)}"
                )
                all_builds |= set(nvrs)

    brew_session = koji.ClientSession(runtime.group_config.urls.brewhub
                                      or constants.BREW_HUB)
    if builds:  # NVRs are directly specified with --build
        build_objs = brew.get_build_objects(list(builds), brew_session)
        all_builds = {build["nvr"] for build in build_objs}

    click.echo(
        f"The following {len(all_builds)} build(s) should be in tag {tag}:")
    for nvr in all_builds:
        green_print(f"\t{nvr}")

    # get NVRs that have been tagged
    tagged_build_objs = brew_session.listTagged(tag,
                                                latest=False,
                                                inherit=False)
    tagged_builds = {build["nvr"] for build in tagged_build_objs}

    # get NVRs that should be tagged
    missing_builds = all_builds - tagged_builds
    click.echo(f"{len(missing_builds)} build(s) need to be tagged into {tag}:")
    for nvr in missing_builds:
        green_print(f"\t{nvr}")

    # get NVRs that should be untagged
    extra_builds = tagged_builds - all_builds
    click.echo(f"{len(extra_builds)} build(s) need to be untagged from {tag}:")
    for nvr in extra_builds:
        green_print(f"\t{nvr}")

    if dry_run:
        yellow_print("Dry run: Do nothing.")
        return

    brew_session.gssapi_login()

    failed_to_untag = []  # initialized here so the summary below works even with --dont-untag
    if not dont_untag:
        # untag extra builds
        extra_builds = list(extra_builds)
        logger.info(f"Untagging {len(extra_builds)} build(s) from {tag}...")
        multicall_tasks = brew.untag_builds(tag, extra_builds, brew_session)
        for index, task in enumerate(multicall_tasks):
            nvr = extra_builds[index]
            try:
                task.result  # accessing .result raises if the untag call failed
                click.echo(f"{nvr} has been successfully untagged from {tag}")
            except Exception as ex:
                failed_to_untag.append(nvr)
                logger.error(f"Failed to untag {nvr}: {ex}")

    # tag missing builds
    missing_builds = list(missing_builds)
    task_id_nvr_map = {}
    logger.info(f"Tagging {len(missing_builds)} build(s) into {tag}...")
    multicall_tasks = brew.tag_builds(tag, missing_builds, brew_session)
    failed_to_tag = []
    for index, task in enumerate(multicall_tasks):
        nvr = missing_builds[index]
        try:
            task_id = task.result
            task_id_nvr_map[task_id] = nvr
        except Exception as ex:
            failed_to_tag.append(nvr)
            logger.error(f"Failed to tag {nvr}: {ex}")

    if task_id_nvr_map:
        # wait for tag task to finish
        logger.info("Waiting for tag tasks to finish")
        brew.wait_tasks(task_id_nvr_map.keys(), brew_session, logger=logger)
        # get tagging results
        stopped_tasks = list(task_id_nvr_map.keys())
        with brew_session.multicall(strict=False) as m:
            multicall_tasks = []
            for task_id in stopped_tasks:
                multicall_tasks.append(
                    m.getTaskResult(task_id, raise_fault=False))
        for index, t in enumerate(multicall_tasks):
            task_id = stopped_tasks[index]
            nvr = task_id_nvr_map[task_id]
            tag_res = t.result
            logger.debug(
                f"Tagging task {task_id} {nvr} returned result {tag_res}")
            if tag_res and 'faultCode' in tag_res and "already tagged" not in tag_res["faultString"]:
                failed_to_tag.append(nvr)
                logger.error(
                    f'Failed to tag {nvr} into {tag}: {tag_res["faultString"]}')
                continue
            click.echo(f"{nvr} has been successfully tagged into {tag}")

    if failed_to_untag:
        red_print("The following builds were failed to untag:")
        for nvr in failed_to_untag:
            red_print(f"\t{nvr}")
    elif not dont_untag:
        green_print(
            f"All unspecified builds have been successfully untagged from {tag}."
        )

    if failed_to_tag:
        red_print("The following builds were failed to tag:")
        for nvr in failed_to_tag:
            red_print(f"\t{nvr}")
    else:
        green_print(f"All builds have been successfully tagged into {tag}.")

    if failed_to_untag or failed_to_tag:
        raise exceptions.ElliottFatalError(
            "Not all builds were successfully tagged/untagged.")