def inspect_stream(runtime, code, strict):
    """Evaluate a single AssemblyIssueCode against the 'stream' assembly and exit.

    :param runtime: Doozer Runtime; initialized here (distgits are not cloned).
    :param code: Name of an AssemblyIssueCode member (looked up by name).
    :param strict: If True, exit 1 even when the assembly definition permits the issue.

    Exits the process: 0 when builds are consistent (or the issue is permitted and
    not strict), 1 when an unpermitted issue is found, strict mode trips, or the
    requested code is not supported.
    """
    code = AssemblyIssueCode[code]  # translate the CLI string into the enum member; raises KeyError on bad input
    if runtime.assembly != 'stream':
        # This command only makes sense for the stream assembly; warn, then force it below.
        print(
            f'Disregarding non-stream assembly: {runtime.assembly}. This command is only intended for stream'
        )

    runtime.assembly = 'stream'
    runtime.initialize(clone_distgits=False)
    assembly_inspector = AssemblyInspector(runtime, lite=True)

    if code == AssemblyIssueCode.INCONSISTENT_RHCOS_RPMS:
        rhcos_builds, rhcos_inconsistencies = _check_inconsistent_rhcos_rpms(
            runtime, assembly_inspector)
        if rhcos_inconsistencies:
            msg = f'Found RHCOS inconsistencies in builds {rhcos_builds}: {rhcos_inconsistencies}'
            print(msg)
            assembly_issue = AssemblyIssue(msg, component='rhcos', code=code)
            if assembly_inspector.does_permit(assembly_issue):
                # The assembly definition explicitly permits this issue code.
                print(f'Assembly permits code {code}.')
                if not strict:
                    exit(0)
                # Permitted, but strict mode still treats it as a failure.
                print('Running in strict mode')
            # Reached when the issue is not permitted, or permitted under strict mode.
            exit(1)
        print(f'RHCOS builds consistent {rhcos_builds}')
        exit(0)
    else:
        print(f'AssemblyIssueCode {code} not supported at this time :(')
        exit(1)
def check_rhcos_issues(
        self, rhcos_build: RHCOSBuildInspector) -> List[AssemblyIssue]:
    """
    Analyzes an RHCOS build to check whether the installed packages are consistent with:
    1. package NVRs defined at the group dependency level
    2. package NVRs defined at the rhcos dependency level
    3. package NVRs of any RPMs built in this assembly/group
    :param rhcos_build: The RHCOS build to analyze.
    :return: Returns a (potentially empty) list of inconsistencies in the build.
    """
    self.runtime.logger.info(
        f'Checking RHCOS build for consistency: {str(rhcos_build)}...')

    issues: List[AssemblyIssue] = []
    # Dict[package_name] -> nvr # Dependency specified in 'rhcos' in assembly definition
    required_packages: Dict[str, str] = dict()
    # Dict[package_name] -> nvr # Dependency specified at group level
    desired_packages: Dict[str, str] = dict()

    # Dependencies are keyed by rhel base version, e.g. 'el8'.
    el_tag = f'el{rhcos_build.get_rhel_base_version()}'

    # First gather group-level dependencies for this rhel version...
    for package_entry in (self.runtime.get_group_config().dependencies or []):
        if el_tag in package_entry:
            nvr = package_entry[el_tag]
            package_name = parse_nvr(nvr)['name']
            desired_packages[package_name] = nvr

    # ...then rhcos-level dependencies, which are both required and take precedence.
    for package_entry in (self.assembly_rhcos_config.dependencies or []):
        if el_tag in package_entry:
            nvr = package_entry[el_tag]
            package_name = parse_nvr(nvr)['name']
            required_packages[package_name] = nvr
            desired_packages[
                package_name] = nvr  # Override if something else was at the group level

    installed_packages = rhcos_build.get_package_build_objects()
    for package_name, desired_nvr in desired_packages.items():
        if package_name in required_packages and package_name not in installed_packages:
            # If the dependency is specified in the 'rhcos' section of the assembly, we must find it or raise an issue.
            # This is impermissible because it can simply be fixed in the assembly definition.
            issues.append(
                AssemblyIssue(
                    f'Expected assembly defined rhcos dependency {desired_nvr} to be installed in {rhcos_build.build_id} but that package was not installed',
                    component='rhcos'))

        if package_name in installed_packages:
            installed_build_dict = installed_packages[package_name]
            installed_nvr = installed_build_dict['nvr']
            if installed_nvr != desired_nvr:
                # We could consider permitting this in AssemblyTypes.CUSTOM, but it means that the RHCOS build
                # could not be effectively reproduced by the rebuild job.
                issues.append(
                    AssemblyIssue(
                        f'Expected {desired_nvr} to be installed in RHCOS build {rhcos_build.build_id} but found {installed_nvr}',
                        component='rhcos',
                        code=AssemblyIssueCode.CONFLICTING_INHERITED_DEPENDENCY))

    """
    If the rhcos build has RPMs from this group installed, make sure they match the NVRs
    associated with this assembly.
    """
    for dgk, assembly_rpm_build in self.get_group_rpm_build_dicts(
            el_ver=rhcos_build.get_rhel_base_version()).items():
        if not assembly_rpm_build:
            # No build of this group RPM for this rhel base; nothing to compare.
            continue
        package_name = assembly_rpm_build['package_name']
        assembly_nvr = assembly_rpm_build['nvr']
        if package_name in installed_packages:
            installed_nvr = installed_packages[package_name]['nvr']
            if assembly_nvr != installed_nvr:
                # We could consider permitting this in AssemblyTypes.CUSTOM, but it means that the RHCOS build
                # could not be effectively reproduced by the rebuild job.
                issues.append(
                    AssemblyIssue(
                        f'Expected {rhcos_build.build_id}/{rhcos_build.brew_arch} image to contain assembly selected RPM build {assembly_nvr} but found {installed_nvr} installed',
                        component='rhcos',
                        code=AssemblyIssueCode.CONFLICTING_GROUP_RPM_INSTALLED))

    return issues
def check_group_image_consistency(
        self,
        build_inspector: BrewBuildImageInspector) -> List[AssemblyIssue]:
    """
    Evaluate the current assembly build and an image in the group and check whether they are consistent with
    :param build_inspector: The brew build to check
    :return: Returns a (potentially empty) list of reasons the image should be rebuilt.
    """
    image_meta = build_inspector.get_image_meta()
    self.runtime.logger.info(
        f'Checking group image for consistency: {image_meta.distgit_key}...'
    )
    issues: List[AssemblyIssue] = []

    installed_packages = build_inspector.get_all_installed_package_build_dicts()
    dgk = build_inspector.get_image_meta().distgit_key

    """
    If the assembly defined any RPM package dependencies at the group or image
    member level, we want to check to make sure that installed RPMs in the build
    image match the override package.
    If reading this, keep in mind that a single package/build may create several
    RPMs. Both assemblies and this method deal with the package level - not
    individual RPMs.
    """
    member_package_overrides, all_package_overrides = image_meta.get_assembly_rpm_package_dependencies(
        el_ver=image_meta.branch_el_target())
    if member_package_overrides or all_package_overrides:
        for package_name, required_nvr in all_package_overrides.items():
            if package_name in member_package_overrides and package_name not in installed_packages:
                # A dependency was defined explicitly in an assembly member, but it is not installed.
                # i.e. the artists expected something to be installed, but it wasn't found in the final image.
                # Raise an issue. In rare circumstances the RPM may be used by early stage of the Dockerfile
                # and not in the final. In this case, it should be permitted in the assembly definition.
                issues.append(
                    AssemblyIssue(
                        f'Expected image to contain assembly member override dependencies NVR {required_nvr} but it was not installed',
                        component=dgk,
                        code=AssemblyIssueCode.MISSING_INHERITED_DEPENDENCY))

            if package_name in installed_packages:
                installed_build_dict: Dict = installed_packages[package_name]
                installed_nvr = installed_build_dict['nvr']
                if required_nvr != installed_nvr:
                    # Installed package version differs from the assembly-declared override.
                    issues.append(
                        AssemblyIssue(
                            f'Expected image to contain assembly override dependencies NVR {required_nvr} but found {installed_nvr} installed',
                            component=dgk,
                            code=AssemblyIssueCode.CONFLICTING_INHERITED_DEPENDENCY))

    """
    If an image contains an RPM from the doozer group, make sure it is the current
    RPM for the assembly.
    """
    el_ver = build_inspector.get_rhel_base_version()
    if el_ver:  # We might not find an el_ver for an image (e.g. FROM scratch)
        for dgk, assembly_rpm_build in self.get_group_rpm_build_dicts(
                el_ver).items():
            if not assembly_rpm_build:
                # The RPM doesn't claim to build for this image's RHEL base, so ignore it.
                continue
            package_name = assembly_rpm_build['package_name']
            assembly_nvr = assembly_rpm_build['nvr']
            if package_name in installed_packages:
                installed_nvr = installed_packages[package_name]['nvr']
                if installed_nvr != assembly_nvr:
                    issues.append(
                        AssemblyIssue(
                            f'Expected image to contain assembly RPM build {assembly_nvr} but found {installed_nvr} installed',
                            component=dgk,
                            code=AssemblyIssueCode.CONFLICTING_GROUP_RPM_INSTALLED))

    """
    Assess whether the image build has the upstream source git repo and git commit that may have been declared/
    overridden in an assembly definition.
    """
    content_git_url = image_meta.config.content.source.git.url
    if content_git_url:
        # Make sure things are in https form so we can compare
        content_git_url, _ = self.runtime.get_public_upstream(
            util.convert_remote_git_to_https(content_git_url))
        build_git_url = util.convert_remote_git_to_https(
            build_inspector.get_source_git_url())

        if content_git_url != build_git_url:
            # Impermissible as artist can just fix upstream git source in assembly definition
            issues.append(
                AssemblyIssue(
                    f'Expected image git source from metadata {content_git_url} but found {build_git_url} as the upstream source of the brew build',
                    component=dgk))

        try:
            target_branch = image_meta.config.content.source.git.branch.target
            if target_branch:
                _ = int(target_branch, 16)  # parse the name as a git commit
                # if we reach here, a git commit hash was declared as the
                # upstream source of the image's content. We should verify
                # it perfectly matches what we find in the assembly build.
                build_commit = build_inspector.get_source_git_commit()
                if target_branch != build_commit:
                    # Impermissible as artist can just fix the assembly definition.
                    issues.append(
                        AssemblyIssue(
                            f'Expected image build git commit {target_branch} but {build_commit} was found in the build',
                            component=dgk))
        except ValueError:
            # The meta's target branch a normal branch name
            # and not a git commit. When this is the case,
            # we don't try to assert anything about the build's
            # git commit.
            pass

    return issues
def check_group_rpm_package_consistency(
        self, rpm_meta: RPMMetadata) -> List[AssemblyIssue]:
    """
    Evaluate the current assembly build of an RPM in the group and check whether
    it is consistent with the assembly definition.
    :param rpm_meta: The rpm metadata to evaluate
    :return: Returns a (potentially empty) list of reasons the rpm should be rebuilt.
    """
    self.runtime.logger.info(
        f'Checking group RPM for consistency: {rpm_meta.distgit_key}...')
    issues: List[AssemblyIssue] = []

    # BUGFIX: the original body iterated `for rpm_meta in self.runtime.rpm_metas():`,
    # shadowing the rpm_meta parameter and re-checking every RPM in the group on
    # every call. Callers already invoke this method once per rpm_meta, so each
    # issue was reported N times (once per group RPM). Check only the RPM passed in.
    dgk = rpm_meta.distgit_key
    for el_ver in rpm_meta.determine_rhel_targets():
        brew_build_dict = self.get_group_rpm_build_dicts(el_ver=el_ver)[dgk]
        if not brew_build_dict:
            # Impermissible. The RPM should be built for each target.
            issues.append(
                AssemblyIssue(
                    f'Did not find rhel-{el_ver} build for {dgk}',
                    component=dgk))
            continue

        """
        Assess whether the rpm build has the upstream source git repo and git commit that may have been declared/
        overridden in an assembly definition.
        """
        content_git_url = rpm_meta.config.content.source.git.url
        if content_git_url:
            # Make sure things are in https form so we can compare
            # content_git_url = util.convert_remote_git_to_https(content_git_url)
            # TODO: The commit in which this comment is introduced also introduces
            # machine parsable yaml documents into distgit commits. Once this code
            # has been running for our active 4.x releases for some time,
            # we should check the distgit commit info against the git.url
            # in our metadata.

            try:
                target_branch = rpm_meta.config.content.source.git.branch.target
                if target_branch:
                    _ = int(target_branch, 16)  # parse the name as a git commit
                    # if we reach here, a git commit hash was declared as the
                    # upstream source of the rpm package's content. We should verify
                    # it perfectly matches what we find in the assembly build.
                    # Each package build gets git commits encoded into the
                    # release field of the NVR. So the NVR should contain
                    # the desired commit.
                    build_nvr = brew_build_dict['nvr']
                    if target_branch[:7] not in build_nvr:
                        # Impermissible because the assembly definition can simply be changed.
                        issues.append(
                            AssemblyIssue(
                                f'{dgk} build for rhel-{el_ver} did not find git commit {target_branch[:7]} in package RPM NVR {build_nvr}',
                                component=dgk))
            except ValueError:
                # The meta's target branch a normal branch name
                # and not a git commit. When this is the case,
                # we don't try to assert anything about the build's
                # git commit.
                pass

    return issues
def release_gen_payload(runtime: Runtime, is_name: Optional[str],
                        is_namespace: Optional[str],
                        organization: Optional[str],
                        repository: Optional[str],
                        exclude_arch: Tuple[str, ...], skip_gc_tagging: bool,
                        emergency_ignore_issues: bool):
    """Generates two sets of input files for `oc` commands to mirror
    content and update image streams. Files are generated for each arch
    defined in ocp-build-data for a version, as well as a final file for
    manifest-lists.

    One set of files are SRC=DEST mirroring definitions for 'oc image
    mirror'. They define what source images we will sync to which
    destination repos, and what the mirrored images will be labeled as.

    The other set of files are YAML image stream tags for 'oc
    apply'. Those are applied to an openshift cluster to define "release
    streams". When they are applied the release controller notices the
    update and begins generating a new payload with the images tagged in
    the image stream.

    For automation purposes this command generates a mirroring yaml files
    after the arch-specific files have been generated. The yaml files
    include names of generated content.

    You may provide the namespace and base name for the image streams, or
    defaults will be used. The generated files will append the -arch and
    -priv suffixes to the given name and namespace as needed.

    The ORGANIZATION and REPOSITORY options are combined into
    ORGANIZATION/REPOSITORY when preparing for mirroring.

    Generate files for mirroring from registry-proxy (OSBS storage) to our
    quay registry:

    \b
        $ doozer --group=openshift-4.2 release:gen-payload \\
            --is-name=4.2-art-latest

    Note that if you use -i to include specific images, you should also include
    openshift-enterprise-cli to satisfy any need for the 'cli' tag. The cli image
    is used automatically as a stand-in for images when an arch does not build
    that particular tag.

    ## Validation ##

    Additionally we want to check that the following conditions are true for each
    imagestream being updated:

    * For all architectures built, RHCOS builds must have matching versions of any
      unshipped RPM they include (per-entry os metadata - the set of RPMs may differ
      between arches, but versions should not).
    * Any RPMs present in images (including machine-os-content) from unshipped RPM
      builds included in one of our candidate tags must exactly version-match the
      latest RPM builds in those candidate tags (ONLY; we never flag what we don't
      directly ship.)

    These checks (and likely more in the future) should run and any failures should
    be listed in brief via a "release.openshift.io/inconsistency" annotation on the
    relevant image istag (these are publicly visible; ref. https://bit.ly/37cseC1)
    and in more detail in state.yaml. The release-controller, per ART-2195, will
    read and propagate/expose this annotation in its display of the release image.
    """
    runtime.initialize(mode='both',
                       clone_distgits=False,
                       clone_source=False,
                       prevent_cloning=True)

    # Only stream/test/None or an explicitly defined assembly may generate a payload.
    if runtime.assembly not in {
            None, "stream", "test"
    } and runtime.assembly not in runtime.releases_config.releases:
        raise DoozerFatalError(
            f"Assembly '{runtime.assembly}' is not explicitly defined.")

    logger = runtime.logger
    brew_session = runtime.build_retrying_koji_client()

    base_imagestream_name: str = is_name if is_name else assembly_imagestream_base_name(
        runtime)
    base_istream_namespace: str = is_namespace if is_namespace else default_imagestream_namespace_base_name(
    )

    if runtime.assembly and runtime.assembly != 'stream' and 'art-latest' in base_imagestream_name:
        raise ValueError(
            'The art-latest imagestreams should not be used for an assembly other than "stream"'
        )

    logger.info(
        f'Collecting latest information associated with the assembly: {runtime.assembly}'
    )
    assembly_inspector = AssemblyInspector(runtime, brew_session)
    logger.info('Checking for mismatched siblings...')
    mismatched_siblings = PayloadGenerator.find_mismatched_siblings(
        assembly_inspector.get_group_release_images().values())

    # A list of strings that denote inconsistencies across all payloads generated
    assembly_issues: List[AssemblyIssue] = list()

    for mismatched_bbii, sibling_bbi in mismatched_siblings:
        mismatch_issue = AssemblyIssue(
            f'{mismatched_bbii.get_nvr()} was built from a different upstream source commit ({mismatched_bbii.get_source_git_commit()[:7]}) than one of its siblings {sibling_bbi.get_nvr()} from {sibling_bbi.get_source_git_commit()[:7]}',
            component=mismatched_bbii.get_image_meta().distgit_key,
            code=AssemblyIssueCode.MISMATCHED_SIBLINGS)
        assembly_issues.append(mismatch_issue)

    # Final report printed as YAML at the end of the run.
    report = dict()
    report['non_release_images'] = [
        image_meta.distgit_key
        for image_meta in runtime.get_non_release_image_metas()
    ]
    report['release_images'] = [
        image_meta.distgit_key
        for image_meta in runtime.get_for_release_image_metas()
    ]
    report['missing_image_builds'] = [
        dgk for (dgk, ii) in
        assembly_inspector.get_group_release_images().items() if ii is None
    ]  # A list of metas where the assembly did not find a build

    if runtime.assembly_type is AssemblyTypes.STREAM:
        # Only nightlies have the concept of private and public payloads
        privacy_modes = [False, True]
    else:
        privacy_modes = [False]

    # Structure to record rhcos builds we use so that they can be analyzed for inconsistencies
    # NOTE(review): nothing in this view appends to targeted_rhcos_builds, so the
    # inconsistency check below always sees empty lists — confirm against the full file.
    targeted_rhcos_builds: Dict[bool, List[RHCOSBuildInspector]] = {
        False: [],
        True: []
    }

    """
    Collect a list of builds we to tag in order to prevent garbage collection.
    Note: we also use this list to warm up caches, so don't wrap this section
    with `if not skip_gc_tagging`.

    To prevent garbage collection for custom assemblies (which won't normally be
    released via errata tool, triggering the traditional garbage collection
    prevention), we must tag these builds explicitly to prevent their GC. It
    is necessary to prevent GC, because we want to be able to build custom
    releases off of custom releases, and so on.

    If we loose images and builds for custom releases in brew due to garbage
    collection, we will not be able to construct derivative release payloads.
    """
    assembly_build_ids: Set[int] = set(
    )  # This list of builds associated with the group/assembly will be used to warm up caches

    list_tags_tasks: Dict[Tuple[int, str], Any] = dict(
    )  # Maps (build_id, tag) tuple to multicall task to list tags
    with runtime.pooled_koji_client_session() as pcs:
        with pcs.multicall(strict=True) as m:
            for bbii in assembly_inspector.get_group_release_images().values():
                if bbii:
                    build_id = bbii.get_brew_build_id()
                    assembly_build_ids.add(
                        build_id)  # Collect up build ids for cache warm up
                    hotfix_tag = bbii.get_image_meta().hotfix_brew_tag()
                    list_tags_tasks[(build_id,
                                     hotfix_tag)] = m.listTags(build=build_id)

            # RPMs can build for multiple versions of RHEL. For example, a single RPM
            # metadata can target 7 & 8.
            # For each rhel version targeted by our RPMs, build a list of RPMs
            # appropriate for the RHEL version with respect to the group/assembly.
            rhel_version_scanned_for_rpms: Dict[int, bool] = dict(
            )  # Maps rhel version -> bool indicating whether we have processed that rhel version
            for rpm_meta in runtime.rpm_metas():
                for el_ver in rpm_meta.determine_rhel_targets():
                    if el_ver in rhel_version_scanned_for_rpms:
                        # We've already processed this RHEL version.
                        continue
                    hotfix_tag = runtime.get_default_hotfix_brew_tag(
                        el_target=el_ver)
                    # Otherwise, query the assembly for this rhel version now.
                    for dgk, rpm_build_dict in assembly_inspector.get_group_rpm_build_dicts(
                            el_ver=el_ver).items():
                        if not rpm_build_dict:
                            # RPM not built for this rhel version
                            continue
                        build_id = rpm_build_dict['id']
                        assembly_build_ids.add(
                            build_id)  # For cache warm up later.
                        list_tags_tasks[(build_id, hotfix_tag)] = m.listTags(
                            build=build_id)
                    # Record that we are done for this rhel version.
                    rhel_version_scanned_for_rpms[el_ver] = True

    # Tasks should now contain tag list information for all builds associated with this assembly.
    # and assembly_build_ids should contain ids for builds that should be cached.

    # We have a list of image and RPM builds associated with this assembly.
    # Tag them unless we have been told not to from the command line.
    if runtime.assembly_type != AssemblyTypes.STREAM and not skip_gc_tagging:
        with runtime.shared_koji_client_session() as koji_api:
            koji_api.gssapi_login()  # Tagging requires authentication
            with koji_api.multicall() as m:
                for tup, list_tag_task in list_tags_tasks.items():
                    build_id = tup[0]
                    desired_tag = tup[1]
                    current_tags = [
                        tag_entry['name']
                        for tag_entry in list_tag_task.result
                    ]
                    if desired_tag not in current_tags:
                        # The hotfix tag is missing, so apply it.
                        runtime.logger.info(
                            f'Adding tag {desired_tag} to build: {build_id} to prevent garbage collection.'
                        )
                        m.tagBuild(desired_tag, build_id)

    # Warm up caches for shipped-build / archive information.
    with runtime.shared_build_status_detector() as bsd:
        bsd.populate_archive_lists(assembly_build_ids)
        bsd.find_shipped_builds(assembly_build_ids)

    """
    Make sure that RPMs belonging to this assembly/group are consistent with the assembly definition.
    """
    for rpm_meta in runtime.rpm_metas():
        issues = assembly_inspector.check_group_rpm_package_consistency(
            rpm_meta)
        assembly_issues.extend(issues)

    """
    If this is a stream assembly, images which are not using the latest builds should not reach
    the release controller. Other assemblies are meant to be constructed from non-latest.
    """
    if runtime.assembly == 'stream':
        for dgk, build_inspector in assembly_inspector.get_group_release_images(
        ).items():
            if build_inspector:
                non_latest_rpm_nvrs = build_inspector.find_non_latest_rpms()
                dgk = build_inspector.get_image_meta().distgit_key
                for installed_nvr, newest_nvr in non_latest_rpm_nvrs:
                    # This indicates an issue with scan-sources or that an image is no longer successfully building.
                    # Impermissible as this speaks to a potentially deeper issue of images not being rebuilt
                    outdated_issue = AssemblyIssue(
                        f'Found outdated RPM ({installed_nvr}) installed in {build_inspector.get_nvr()} when {newest_nvr} was available',
                        component=dgk,
                        code=AssemblyIssueCode.OUTDATED_RPMS_IN_STREAM_BUILD)
                    assembly_issues.append(
                        outdated_issue)  # Add to overall issues

    """
    Make sure image build selected by this assembly/group are consistent with the assembly definition.
    """
    for dgk, bbii in assembly_inspector.get_group_release_images().items():
        if bbii:
            issues = assembly_inspector.check_group_image_consistency(bbii)
            assembly_issues.extend(issues)

    for arch in runtime.arches:
        if arch in exclude_arch:
            logger.info(f'Excluding payload files architecture: {arch}')
            continue

        # Whether private or public, the assembly's canonical payload content is the same.
        entries: Dict[str, PayloadGenerator.
                      PayloadEntry] = PayloadGenerator.find_payload_entries(
                          assembly_inspector, arch,
                          f'quay.io/{organization}/{repository}'
                      )  # Key of this dict is release payload tag name

        for tag, payload_entry in entries.items():
            if payload_entry.image_meta:
                # We already stored inconsistencies for each image_meta; look them up if there are any.
                payload_entry.issues.extend(
                    filter(
                        lambda ai: ai.component == payload_entry.image_meta.
                        distgit_key, assembly_issues))
            elif payload_entry.rhcos_build:
                assembly_issues.extend(
                    assembly_inspector.check_rhcos_issues(
                        payload_entry.rhcos_build))
                payload_entry.issues.extend(
                    filter(lambda ai: ai.component == 'rhcos',
                           assembly_issues))
                if runtime.assembly == 'stream':
                    # For stream alone, we want to enforce that the very latest RPMs are installed.
                    non_latest_rpm_nvrs = payload_entry.rhcos_build.find_non_latest_rpms(
                    )
                    for installed_nvr, newest_nvr in non_latest_rpm_nvrs:
                        assembly_issues.append(
                            AssemblyIssue(
                                f'Found outdated RPM ({installed_nvr}) installed in {payload_entry.rhcos_build} when {newest_nvr} is available',
                                component='rhcos',
                                code=AssemblyIssueCode.OUTDATED_RPMS_IN_STREAM_BUILD))
            else:
                raise IOError(f'Unsupported PayloadEntry: {payload_entry}')

        # Save the default SRC=DEST input to a file for syncing by 'oc image mirror'. Why is
        # there no '-priv'? The true images for the assembly are what we are syncing -
        # it is what we update in the imagestream that defines whether the image will be
        # part of a public release.
        dests: Set[str] = set(
        )  # Prevents writing the same destination twice (not supported by oc)
        with io.open(f"src_dest.{arch}", "w+", encoding="utf-8") as out_file:
            for payload_entry in entries.values():
                if not payload_entry.archive_inspector:
                    # Nothing to mirror (e.g. machine-os-content)
                    continue
                if payload_entry.dest_pullspec in dests:
                    # Don't write the same destination twice.
                    continue
                out_file.write(
                    f"{payload_entry.archive_inspector.get_archive_pullspec()}={payload_entry.dest_pullspec}\n"
                )
                dests.add(payload_entry.dest_pullspec)

        for private_mode in privacy_modes:
            logger.info(
                f'Building payload files for architecture: {arch}; private: {private_mode}'
            )

            file_suffix = arch + '-priv' if private_mode else arch
            with io.open(f"image_stream.{file_suffix}.yaml",
                         "w+",
                         encoding="utf-8") as out_file:
                istags: List[Dict] = []
                for payload_tag_name, payload_entry in entries.items():
                    if payload_entry.build_inspector and payload_entry.build_inspector.is_under_embargo(
                    ) and private_mode is False:
                        # Don't send this istag update to the public release controller
                        continue
                    istags.append(
                        PayloadGenerator.build_payload_istag(
                            payload_tag_name, payload_entry))

                imagestream_name, imagestream_namespace = payload_imagestream_name_and_namespace(
                    base_imagestream_name, base_istream_namespace, arch,
                    private_mode)

                istream_spec = PayloadGenerator.build_payload_imagestream(
                    imagestream_name, imagestream_namespace, istags,
                    assembly_issues)
                yaml.safe_dump(istream_spec,
                               out_file,
                               indent=2,
                               default_flow_style=False)

    # Now make sure that all of the RHCOS builds contain consistent RPMs
    for private_mode in privacy_modes:
        rhcos_builds = targeted_rhcos_builds[private_mode]
        rhcos_inconsistencies: Dict[
            str,
            List[str]] = PayloadGenerator.find_rhcos_build_rpm_inconsistencies(
                rhcos_builds)
        if rhcos_inconsistencies:
            assembly_issues.append(
                AssemblyIssue(
                    f'Found RHCOS inconsistencies in builds {targeted_rhcos_builds}: {rhcos_inconsistencies}',
                    component='rhcos',
                    code=AssemblyIssueCode.INCONSISTENT_RHCOS_RPMS))

    # If the assembly claims to have reference nightlies, assert that our payload
    # matches them exactly.
    nightly_match_issues = PayloadGenerator.check_nightlies_consistency(
        assembly_inspector)
    if nightly_match_issues:
        assembly_issues.extend(nightly_match_issues)

    assembly_issues_report: Dict[str, List[Dict]] = dict()
    report['assembly_issues'] = assembly_issues_report

    overall_permitted = True
    for ai in assembly_issues:
        permitted = assembly_inspector.does_permit(ai)
        overall_permitted &= permitted  # If anything is not permitted, exit with an error
        assembly_issues_report.setdefault(ai.component, []).append({
            'code': ai.code.name,
            'msg': ai.msg,
            'permitted': permitted
        })

    report['viable'] = overall_permitted

    print(yaml.dump(report, default_flow_style=False, indent=2))
    if not overall_permitted:
        red_print(
            'DO NOT PROCEED WITH THIS ASSEMBLY PAYLOAD -- not all detected issues are permitted.',
            file=sys.stderr)
        if not emergency_ignore_issues:
            exit(1)
    exit(0)
def terminal_issue(msg: str) -> List[AssemblyIssue]:
    """Wrap *msg* as a single 'reference-releases' AssemblyIssue in a one-element list."""
    issue = AssemblyIssue(msg, component='reference-releases')
    return [issue]
def _check_nightly_consistency(assembly_inspector: AssemblyInspector,
                               nightly: str, arch: str) -> List[AssemblyIssue]:
    """Compare a reference nightly against the assembly's computed payload.

    :param assembly_inspector: Inspector carrying the runtime and assembly context.
    :param nightly: Name of the reference nightly (e.g. 4.9.0-0.nightly-...).
    :param arch: The brew architecture the payload entries were computed for.
    :return: A (potentially empty) list of AssemblyIssues. Any terminal problem
             (wrong release, unreachable nightly, missing tags) is returned as a
             single 'reference-releases' issue.
    :raises IOError: If the nightly's pullspecs are not sha digests or contain a
             tag the computed payload does not know about.
    """
    runtime = assembly_inspector.runtime

    def terminal_issue(msg: str) -> List[AssemblyIssue]:
        # A problem severe enough that no further comparison is meaningful.
        return [AssemblyIssue(msg, component='reference-releases')]

    # FIX: removed the dead annotation `issues: List[str]` that preceded this
    # point in the original. It was never assigned and carried the wrong type;
    # the real list is declared below as List[AssemblyIssue].

    runtime.logger.info(f'Processing nightly: {nightly}')
    major_minor, brew_cpu_arch, priv = isolate_nightly_name_components(
        nightly)

    if major_minor != runtime.get_minor_version():
        return terminal_issue(
            f'Specified nightly {nightly} does not match group major.minor'
        )

    rc_suffix = go_suffix_for_arch(brew_cpu_arch, priv)

    # Retry the release-info query a few times; the CI registry can be flaky.
    retries: int = 3
    release_json_str = ''
    rc = -1
    pullspec = f'registry.ci.openshift.org/ocp{rc_suffix}/release{rc_suffix}:{nightly}'
    while retries > 0:
        rc, release_json_str, err = exectools.cmd_gather(
            f'oc adm release info {pullspec} -o=json')
        if rc == 0:
            break
        # FIX: use logging's `warning`; `warn` is a deprecated alias.
        runtime.logger.warning(
            f'Error accessing nightly release info for {pullspec}: {err}')
        retries -= 1

    if rc != 0:
        return terminal_issue(
            f'Unable to gather nightly release info details: {pullspec}; garbage collected?'
        )

    release_info = Model(dict_to_model=json.loads(release_json_str))
    if not release_info.references.spec.tags:
        return terminal_issue(f'Could not find tags in nightly {nightly}')

    issues: List[AssemblyIssue] = list()
    payload_entries: Dict[
        str, PayloadGenerator.
        PayloadEntry] = PayloadGenerator.find_payload_entries(
            assembly_inspector, arch, '')
    for component_tag in release_info.references.spec.tags:  # For each tag in the imagestream
        payload_tag_name: str = component_tag.name  # e.g. "aws-ebs-csi-driver"
        payload_tag_pullspec: str = component_tag[
            'from'].name  # quay pullspec
        if '@' not in payload_tag_pullspec:
            # This speaks to an invalid nightly, so raise an exception
            raise IOError(
                f'Expected pullspec in {nightly}:{payload_tag_name} to be sha digest but found invalid: {payload_tag_pullspec}'
            )
        pullspec_sha = payload_tag_pullspec.rsplit('@', 1)[-1]
        entry = payload_entries.get(payload_tag_name, None)

        if not entry:
            raise IOError(
                f'Did not find {nightly} payload tag {payload_tag_name} in computed assembly payload'
            )

        if entry.archive_inspector:
            if entry.archive_inspector.get_archive_digest() != pullspec_sha:
                # Impermissible because the artist should remove the reference nightlies from the assembly definition
                issues.append(
                    AssemblyIssue(
                        f'{nightly} contains {payload_tag_name} sha {pullspec_sha} but assembly computed archive: {entry.archive_inspector.get_archive_id()} and {entry.archive_inspector.get_archive_pullspec()}',
                        component='reference-releases'))
        elif entry.rhcos_build:
            if entry.rhcos_build.get_machine_os_content_digest(
            ) != pullspec_sha:
                # Impermissible because the artist should remove the reference nightlies from the assembly definition
                issues.append(
                    AssemblyIssue(
                        f'{nightly} contains {payload_tag_name} sha {pullspec_sha} but assembly computed rhcos: {entry.rhcos_build} and {entry.rhcos_build.get_machine_os_content_digest()}',
                        component='reference-releases'))
        else:
            raise IOError(f'Unsupported payload entry {entry}')

    return issues