def compare(pkgA, pkgB):
    """Compare two package NVR strings using RPM version ordering.

    :param str pkgA: first package NVR, e.g. "foo-1.0-2"
    :param str pkgB: second package NVR
    :return: negative, zero, or positive int (rpm.labelCompare semantics)
    """
    def evr(nvr_dict):
        # rpm compares (epoch, version, release) triples
        return (nvr_dict['epoch'], nvr_dict['version'], nvr_dict['release'])

    return rpm.labelCompare(evr(koji.parse_NVR(pkgA)),
                            evr(koji.parse_NVR(pkgB)))
def compare(pkgA, pkgB):
    """Compare two package NVR strings using RPM version ordering.

    NOTE(review): this file contains another, functionally identical
    definition of compare(); consider consolidating the two.

    :param str pkgA: first package NVR, e.g. "foo-1.0-2"
    :param str pkgB: second package NVR
    :return: negative, zero, or positive int (rpm.labelCompare semantics)
    """
    pkgdictA = koji.parse_NVR(pkgA)
    pkgdictB = koji.parse_NVR(pkgB)
    # rpm compares (epoch, version, release) triples
    rc = rpm.labelCompare(
        (pkgdictA['epoch'], pkgdictA['version'], pkgdictA['release']),
        (pkgdictB['epoch'], pkgdictB['version'], pkgdictB['release']))
    return rc
def _prepare_builds(self, db_event, bundles_by_digest, to_rebuild_digests):
    """
    Prepare models.ArtifactBuild instance for every bundle that will be rebuilt

    :param models.Event db_event: database event that will contain builds
    :param dict bundles_by_digest: mapping of bundle digest to bundle data
    :param list to_rebuild_digests: digests of bundles to rebuild
    :return: builds that already in database and ready to be submitted to brew
    :rtype: list
    """
    builds = []
    # URL template for the pullspec-overrides endpoint; filled in with the
    # database build id once the ArtifactBuild row exists.
    csv_mod_url = conf.freshmaker_root_url + "/api/2/pullspec_overrides/{}"
    for digest in to_rebuild_digests:
        bundle = bundles_by_digest[digest]
        # Reset context to db_event for each iteration before
        # the ArtifactBuild is created.
        self.set_context(db_event)

        rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value
        bundle_name = koji.parse_NVR(bundle["nvr"])["name"]

        build = self.record_build(db_event, bundle_name, ArtifactType.IMAGE,
                                  state=ArtifactBuildState.PLANNED.value,
                                  original_nvr=bundle["nvr"],
                                  rebuild_reason=rebuild_reason)

        # Set context to particular build so logging shows this build
        # in case of error.
        self.set_context(build)
        build.transition(ArtifactBuildState.PLANNED.value, "")

        # Pull repository/commit/target/... metadata from Koji for the
        # original bundle build; consumed later when submitting to Brew/OSBS.
        additional_data = ContainerImage.get_additional_data_from_koji(
            bundle["nvr"])

        build.build_args = json.dumps({
            "repository": additional_data["repository"],
            "commit": additional_data["commit"],
            "target": additional_data["target"],
            "branch": additional_data["git_branch"],
            "arches": additional_data["arches"],
            "operator_csv_modifications_url": csv_mod_url.format(build.id),
        })
        build.bundle_pullspec_overrides = {
            "append": bundle["append"],
            "pullspecs": bundle["pullspecs"],
            "update": bundle["update"],
        }

        # Commit per bundle so each prepared build is persisted even if a
        # later iteration fails.
        db.session.commit()
        builds.append(build)
    return builds
def getRepo(self, tag, builds=None, wait=False):
    """
    Get the active repo for the given tag.  If there is no repo available,
    wait for a repo to be created.

    if wait is True - always wait for new repo
    if builds are present, wait until repo doesn't contain these
    """
    # Record the request time only when the caller explicitly asked to
    # wait; waitrepo uses it to require a repo newer than this timestamp.
    create_ts = time.time() if wait else None

    repo_info = self.session.getRepo(tag)
    taginfo = self.session.getTag(tag, strict=True)

    if not repo_info:
        # No repo yet -- only sensible to wait if a target exists that
        # would cause one to be generated.
        if not self.session.getBuildTargets(buildTagID=taginfo['id']):
            raise koji.BuildError('no repo (and no target) for tag %s' % taginfo['name'])
        wait = True
    elif builds:
        # Wait if the current repo predates any of the requested builds.
        parsed = [koji.parse_NVR(nvr) for nvr in builds]
        if not koji.util.checkForBuilds(self.session, taginfo['id'],
                                        parsed, repo_info['create_event']):
            wait = True

    if wait:
        subtask_id = self.session.host.subtask(
            method='waitrepo',
            arglist=[tag, create_ts, builds],
            parent=self.id)
        repo_info = self.wait(subtask_id)[subtask_id]
    return repo_info
def test_checkForBuilds(self):
    """Test checkForBuilds function"""
    builds = [koji.parse_NVR("pkg-1-r1"),
              koji.parse_NVR("pkg-1-r2"),
              koji.parse_NVR("pkg-1.1-r1")]
    latest_builds = [koji.parse_NVR("pkg-1.1-r1")]
    # Fake hub session: getLatestBuilds/listTagged return canned data.
    session = mock.MagicMock()
    session.getLatestBuilds = mock.Mock(return_value=latest_builds)
    session.listTagged = mock.Mock(return_value=builds)
    event = mock.MagicMock()

    # latest bit check: only the newest tagged build satisfies latest=True
    self.assertTrue(koji.util.checkForBuilds(
        session, 'fedora', (koji.parse_NVR('pkg-1.1-r1'),), event,
        latest=True))
    self.assertFalse(koji.util.checkForBuilds(
        session, 'fedora', (koji.parse_NVR('pkg-1.0-r2'),), event,
        latest=True))

    # all elements in builds should exist.
    for b in builds:
        self.assertTrue(
            koji.util.checkForBuilds(session, "pkg-build", (b,), event))

    # non-existent build test.
    self.assertEqual(False, koji.util.checkForBuilds(
        session, "pkg-build", (koji.parse_NVR("pkg-1.0-r1"),), event))
def test_parse_NVR(self):
    """Test the parse_NVR method"""
    # Non-string inputs fail with AttributeError (no str methods).
    self.assertRaises(AttributeError, koji.parse_NVR, None)
    self.assertRaises(AttributeError, koji.parse_NVR, 1)
    self.assertRaises(AttributeError, koji.parse_NVR, {})
    self.assertRaises(AttributeError, koji.parse_NVR, [])
    # Strings that are not valid N-V-R raise koji.GenericError.
    self.assertRaises(koji.GenericError, koji.parse_NVR, "")
    self.assertRaises(koji.GenericError, koji.parse_NVR, "foo")
    self.assertRaises(koji.GenericError, koji.parse_NVR, "foo-1")
    self.assertRaises(koji.GenericError, koji.parse_NVR, "foo-1-")
    self.assertRaises(koji.GenericError, koji.parse_NVR, "foo--1")
    self.assertRaises(koji.GenericError, koji.parse_NVR, "--1")
    # Plain NVR: epoch defaults to the empty string.
    ret = koji.parse_NVR("foo-1-2")
    self.assertEqual(ret['name'], "foo")
    self.assertEqual(ret['version'], "1")
    self.assertEqual(ret['release'], "2")
    self.assertEqual(ret['epoch'], "")
    # "epoch:" prefix is split off into the epoch field.
    ret = koji.parse_NVR("12:foo-1-2")
    self.assertEqual(ret['name'], "foo")
    self.assertEqual(ret['version'], "1")
    self.assertEqual(ret['release'], "2")
    self.assertEqual(ret['epoch'], "12")
def _get_compose_source(self, nvr):
    """Get tag from which to collect packages to compose
    :param str nvr: build NVR used to find correct tag.
    :return: found tag. None is returned if build is not the latest build
        of found tag.
    :rtype: str
    """
    with koji_service(
            conf.koji_profile, log, dry_run=self.handler.dry_run) as service:
        # Get the list of *-candidate tags, because packages added into
        # Errata should be tagged into -candidate tag.
        tags = service.session.listTags(nvr)
        candidate_tags = [
            tag['name'] for tag in tags
            if tag['name'].endswith('-candidate')]

        # Candidate tags may include unsigned packages and ODCS won't
        # allow generating compose from them, so try to find out final
        # version of candidate tag (without the "-candidate" suffix).
        final_tags = []
        for candidate_tag in candidate_tags:
            final = candidate_tag[:-len("-candidate")]
            final_tags += [
                tag['name'] for tag in tags if tag['name'] == final]

        # Prefer final tags over candidate tags.
        tags_to_try = final_tags + candidate_tags
        for tag in tags_to_try:
            latest_build = service.session.listTagged(
                tag,
                latest=True,
                package=koji.parse_NVR(nvr)['name'])
            if latest_build and latest_build[0]['nvr'] == nvr:
                self.handler.log_info(
                    "Package %r is latest version in tag %r, "
                    "will use this tag", nvr, tag)
                return tag
            elif not latest_build:
                self.handler.log_info(
                    "Could not find package %r in tag %r, "
                    "skipping this tag", nvr, tag)
            else:
                # Fixed typo in log message: "not he latest" -> "not the latest".
                self.handler.log_info(
                    "Package %r is not the latest in the tag %r ("
                    "latest is %r), skipping this tag",
                    nvr, tag, latest_build[0]['nvr'])
def main():
    """Fetch and print the dist-git repo and ref for each NVR given on the CLI."""
    args = parse_args()
    parsed_nvrs = [koji.parse_NVR(item) for item in args.nvrs]
    # NOTE(review): rsession and exit_code are never used below; kept to
    # preserve the original behavior (Session() may have side effects).
    rsession = Session()
    exit_code = 0
    for nvr in parsed_nvrs:
        if not args.q:
            # Quiet mode (-q) suppresses the per-NVR title.
            display_title(nvr)
        resource_url = get_resources_url(args.profile, nvr)
        response = requests.get(resource_url)
        if response.status_code == 200:
            payload = json.loads(response.content)
            print(payload['repo'])
            print(payload['ref'])
        elif args.q:
            print(response.status_code)
        else:
            print('Response code: %s, reason: %s' % (response.status_code, response.reason))
def _filter_out_not_allowed_builds(self, image):
    """
    Helper method for _find_images_to_rebuild(...) to filter out all images
    which are not allowed to build by configuration.

    :param ContainerImage image: Image to be checked.
    :rtype: bool
    :return: True when image should be filtered out.
    """
    nvr_parts = koji.parse_NVR(image.nvr)
    allowed = self.event.is_allowed(
        self,
        image_name=nvr_parts["name"],
        image_version=nvr_parts["version"],
        image_release=nvr_parts["release"])
    if allowed:
        return False
    self.log_info(
        "Skipping rebuild of image %s, not allowed by configuration",
        image.nvr)
    return True
def get_rebuilt_nvr(artifact_type, nvr):
    """
    Returns the new NVR of artifact which should be used when rebuilding
    the artifact.

    :param ArtifactType artifact_type: Type of the rebuilt artifact.
    :param str nvr: Original NVR of artifact.
    :rtype: str
    :return: newly generated NVR
    """
    if artifact_type != ArtifactType.IMAGE.value:
        # Only container images get a regenerated NVR.
        return None
    parsed = koji.parse_NVR(nvr)
    # Rewrite release "XX.YY" as "XX.<timestamp><suffix>" so every rebuild
    # gets a unique, monotonically increasing release value.
    base_release = parsed["release"].split(".")[0]
    new_release = f"{base_release}.{int(time.time())}{conf.rebuilt_nvr_release_suffix}"
    return "%s-%s-%s" % (parsed["name"], parsed["version"], new_release)
# Top-level script: list NVRs and package names contained in an errata.
# NOTE(review): the next statement is corrupted -- the original curl command
# and the code producing `json_output` appear to have been redacted/mangled
# (the '******' and unbalanced quotes are not valid Python). Restore the
# original from version control before running.
command_nvr = "curl --user ':'******'.')
json_result = json.loads(json_output)
# NOTE(review): dict.keys()[0] is Python 2 only; on Python 3 this raises
# TypeError (keys() view is not subscriptable) -- confirm target interpreter
# or switch to next(iter(...)).
key = json_result.keys()[0]
nvrs = []
names = []
for item in json_result[key]:
    build = item.keys()[0]
    nvrs.append(build)
    name = koji.parse_NVR(build)['name']
    names.append(name)
print('----------------------')
print("Nvrs in errata:")
print('----------------------')
print_stuff(sorted(nvrs))
print('')
print('')
print('')
print('----------------------')
print("Build names in errata:")
print('----------------------')
print_stuff(sorted(names))
def _record_batches(self, batches, event, builds=None):
    """
    Records the images from batches to database.

    :param batches list: Output of LightBlue._find_images_to_rebuild(...).
    :param event ErrataAdvisoryRPMsSignedEvent: The event this handler
        is currently handling.
    :param builds dict: mappings from docker image build NVR to
        corresponding ArtifactBuild object, e.g.
        ``{brew_build_nvr: ArtifactBuild, ...}``. Previous builds returned
        from this method can be passed to this call to be extended by adding
        a new mappings after docker image is stored into database. For the
        first time to call this method, builds could be None.
    :return: a mapping between docker image build NVR and corresponding
        ArtifactBuild object representing a future rebuild of that docker
        image. It is extended by including those docker images stored into
        database.
    :rtype: dict
    """
    db_event = Event.get_or_create_from_event(db.session, event)

    # Used as tmp dict with {brew_build_nvr: ArtifactBuild, ...} mapping.
    builds = builds or {}
    # Cache for ODCS pulp composes. Key is white-spaced, sorted, list
    # of content_sets. Value is Compose database object.
    odcs_cache = {}

    for batch in batches:
        for image in batch:
            # Reset context to db_event for each iteration before
            # the ArtifactBuild is created.
            self.set_context(db_event)

            nvr = image.nvr
            if nvr in builds:
                # Already recorded in a previous call/batch.
                self.log_debug(
                    "Skipping recording build %s, "
                    "it is already in db", nvr)
                continue

            parent_build = db_event.get_artifact_build_from_event_dependencies(
                nvr)
            if parent_build:
                self.log_debug(
                    "Skipping recording build %s, "
                    "it is already built in dependant event %r", nvr,
                    parent_build[0].event_id)
                continue

            self.log_debug("Recording %s", nvr)
            parent_nvr = image["parent"].nvr \
                if "parent" in image and image["parent"] else None
            dep_on = builds[parent_nvr] if parent_nvr in builds else None

            # If the parent was already rebuilt by a dependent event, point
            # at its rebuilt NVR and drop the in-event dependency.
            if parent_nvr:
                build = db_event.get_artifact_build_from_event_dependencies(
                    parent_nvr)
                if build:
                    parent_nvr = build[0].rebuilt_nvr
                    dep_on = None

            if "error" in image and image["error"]:
                state_reason = image["error"]
                state = ArtifactBuildState.FAILED.value
            elif dep_on and dep_on.state == ArtifactBuildState.FAILED.value:
                # If this artifact build depends on a build which cannot
                # be built by Freshmaker, mark this one as failed too.
                state_reason = "Cannot build artifact, because its " \
                    "dependency cannot be built."
                state = ArtifactBuildState.FAILED.value
            else:
                state_reason = ""
                state = ArtifactBuildState.PLANNED.value

            image_name = koji.parse_NVR(image.nvr)["name"]

            # Only released images are considered as directly affected for
            # rebuild. If some image is not in the latest released version and
            # it is included in a rebuild, it must be just a dependency of
            # other image.
            if image.get('directly_affected'):
                rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value
            else:
                rebuild_reason = RebuildReason.DEPENDENCY.value

            build = self.record_build(
                event, image_name, ArtifactType.IMAGE,
                dep_on=dep_on,
                state=ArtifactBuildState.PLANNED.value,
                original_nvr=nvr,
                rebuild_reason=rebuild_reason)

            # Set context to particular build so logging shows this build
            # in case of error.
            self.set_context(build)
            # Transition to the real state (may be FAILED) after creation.
            build.transition(state, state_reason)

            build.build_args = json.dumps({
                "repository": image["repository"],
                "commit": image["commit"],
                "original_parent": parent_nvr,
                "target": image["target"],
                "branch": image["git_branch"],
                "arches": image["arches"],
                "renewed_odcs_compose_ids": image["odcs_compose_ids"],
            })

            db.session.commit()

            if state != ArtifactBuildState.FAILED.value:
                # Store odcs pulp compose to build.
                # Also generate pulp repos in case the image is unpublished,
                # because in this case, we have to generate extra ODCS compose
                # with all the RPMs in the image anyway later. And OSBS works
                # in a way that we have to pass all the ODCS composes to it or
                # no ODCS compose at all.
                if image["generate_pulp_repos"] or not image["published"]:
                    # Check if the compose for these content_sets is
                    # already cached and use it in this case.
                    cache_key = " ".join(sorted(image["content_sets"]))
                    if cache_key in odcs_cache:
                        db_compose = odcs_cache[cache_key]
                    else:
                        compose = self.odcs.prepare_pulp_repo(
                            build, image["content_sets"])

                        if build.state != ArtifactBuildState.FAILED.value:
                            db_compose = Compose(
                                odcs_compose_id=compose['id'])
                            db.session.add(db_compose)
                            db.session.commit()
                            odcs_cache[cache_key] = db_compose
                        else:
                            db_compose = None
                            db.session.commit()
                    if db_compose:
                        build.add_composes(db.session, [db_compose])
                        db.session.commit()

                # Unpublished images can contain unreleased RPMs, so generate
                # the ODCS compose with all the RPMs in the image to allow
                # installation of possibly unreleased RPMs.
                if not image["published"]:
                    compose = self.odcs.prepare_odcs_compose_with_image_rpms(
                        image)
                    if compose:
                        db_compose = Compose(odcs_compose_id=compose['id'])
                        db.session.add(db_compose)
                        db.session.commit()
                        build.add_composes(db.session, [db_compose])
                        db.session.commit()

            builds[nvr] = build

    # Reset context to db_event.
    self.set_context(db_event)

    return builds
def printBuild(build):
    """Render a build NVR string back into canonical name-version-release form."""
    parts = koji.parse_NVR(build)
    return f"{parts['name']}-{parts['version']}-{parts['release']}"
def setUp(self):
    """Build a throwaway koji topdir with signed RPMs, a pkglist and a sigmap,
    plus kojihub mocks, for the dist-repo tests."""
    self.topdir = tempfile.mkdtemp()
    # Canned repo row as kojihub.repo_info would return it.
    self.rinfo = {
        'create_event': 2915,
        'create_ts': 1487256924.72718,
        'creation_time': '2017-02-16 14:55:24.727181',
        'id': 47,
        'state': 1,
        'tag_id': 2,
        'tag_name': 'my-tag'
    }
    self.arch = 'x86_64'

    # set up a fake koji topdir
    # koji.pathinfo._topdir = self.topdir
    mock.patch('koji.pathinfo._topdir', new=self.topdir).start()

    repodir = koji.pathinfo.distrepo(self.rinfo['id'], self.rinfo['tag_name'])
    archdir = "%s/%s" % (repodir, koji.canonArch(self.arch))
    os.makedirs(archdir)
    self.uploadpath = 'UNITTEST'
    workdir = koji.pathinfo.work()
    uploaddir = "%s/%s" % (workdir, self.uploadpath)
    os.makedirs(uploaddir)

    # place some test files (content is just the filename)
    self.files = ['foo.drpm', 'repomd.xml']
    self.expected = ['x86_64/drpms/foo.drpm', 'x86_64/repodata/repomd.xml']
    for fn in self.files:
        path = os.path.join(uploaddir, fn)
        koji.ensuredir(os.path.dirname(path))
        with open(path, 'w') as fo:
            fo.write('%s' % fn)

    # generate pkglist file and sigmap
    self.files.append('pkglist')
    plist = os.path.join(uploaddir, 'pkglist')
    nvrs = ['aaa-1.0-2', 'bbb-3.0-5', 'ccc-8.0-13', 'ddd-21.0-34']
    self.sigmap = []
    self.rpms = {}
    self.builds = {}
    self.key = '4c8da725'
    with open(plist, 'w') as f_pkglist:
        for nvr in nvrs:
            binfo = koji.parse_NVR(nvr)
            rpminfo = binfo.copy()
            rpminfo['arch'] = 'x86_64'
            builddir = koji.pathinfo.build(binfo)
            relpath = koji.pathinfo.signed(rpminfo, self.key)
            path = os.path.join(builddir, relpath)
            koji.ensuredir(os.path.dirname(path))
            basename = os.path.basename(path)
            with open(path, 'w') as fo:
                fo.write('%s' % basename)
            f_pkglist.write(path)
            f_pkglist.write('\n')
            # Repo layout buckets packages by first letter of the filename.
            self.expected.append('x86_64/%s/%s' % (basename[0], basename))
            # Synthesize stable ids so our_get_rpm/our_get_build can look
            # them up: builds from 10000, rpms from 20000.
            build_id = len(self.builds) + 10000
            rpm_id = len(self.rpms) + 20000
            binfo['id'] = build_id
            rpminfo['build_id'] = build_id
            rpminfo['id'] = rpm_id
            self.builds[build_id] = binfo
            self.rpms[rpm_id] = rpminfo
            self.sigmap.append([rpm_id, self.key])

    # mocks: route kojihub data access to the fixtures above
    self.repo_info = mock.patch('kojihub.repo_info').start()
    self.repo_info.return_value = self.rinfo.copy()
    self.get_rpm = mock.patch('kojihub.get_rpm').start()
    self.get_build = mock.patch('kojihub.get_build').start()
    self.get_rpm.side_effect = self.our_get_rpm
    self.get_build.side_effect = self.our_get_build
def _record_batches(self, batches, db_event, lb):
    """
    Records the images from batches to the database.

    :param batches list: Output of LightBlue._find_images_to_rebuild(...).
    :param db_event: event to handle.
    :param lb LightBlue: LightBlue instance
    :return: a mapping between image build NVR and corresponding ArtifactBuild
        object representing a future rebuild of that. It is extended by
        including those images stored into database.
    :rtype: dict
    """
    # builds tracks all the builds we register in db
    builds = {}

    for batch in batches:
        for image in batch:
            # Reset context to db_event for each iteration before
            # the ArtifactBuild is created.
            self.set_context(db_event)

            nvr = image["brew"]["build"]
            self.log_debug("Recording %s", nvr)

            # NVR of the parent image, if the image has one. (Previously
            # computed twice; the duplicate assignment was removed.)
            parent_nvr = image["parent"].nvr \
                if "parent" in image and image["parent"] else None

            if "error" in image and image["error"]:
                state_reason = image["error"]
                state = ArtifactBuildState.FAILED.value
            else:
                state_reason = ""
                state = ArtifactBuildState.PLANNED.value

            image_name = koji.parse_NVR(image["brew"]["build"])["name"]
            dep_on = builds[parent_nvr] if parent_nvr in builds else None

            # We don't need to rebuild the nvr this time. The release value
            # will be automatically generated by OSBS.
            build = self.record_build(
                self.event, image_name, ArtifactType.IMAGE,
                dep_on=dep_on,
                state=ArtifactBuildState.PLANNED.value,
                original_nvr=nvr)

            # Set context to particular build so logging shows this build
            # in case of error.
            self.set_context(build)

            image.resolve(lb)
            build.transition(state, state_reason)

            build_args = {}
            build_args["repository"] = image['repository']
            build_args["commit"] = image["commit"]
            build_args["target"] = (self.event.brew_target
                                    if self.event.brew_target
                                    else image["target"])
            build_args["branch"] = image["git_branch"]
            # BUG FIX: a stray trailing comma previously made this a 1-tuple,
            # so json.dumps stored "original_parent" as a JSON list instead
            # of a plain string.
            build_args["original_parent"] = parent_nvr
            build_args["arches"] = image["arches"]
            build.build_args = json.dumps(build_args)

            db.session.commit()
            builds[nvr] = build

    # Reset context to db_event.
    self.set_context(db_event)
    return builds
def setUp(self):
    """Build a throwaway koji topdir with signed RPMs, a pkglist, kojipkgs
    and a repo manifest, plus kojihub mocks, for the dist-repo tests."""
    self.topdir = tempfile.mkdtemp()
    # Canned repo row as kojihub.repo_info would return it.
    self.rinfo = {
        'create_event': 2915,
        'create_ts': 1487256924.72718,
        'creation_time': '2017-02-16 14:55:24.727181',
        'id': 47,
        'state': 0,  # INIT
        'tag_id': 2,
        'tag_name': 'my-tag'}
    self.arch = 'x86_64'

    # set up a fake koji topdir
    # koji.pathinfo._topdir = self.topdir
    mock.patch('koji.pathinfo._topdir', new=self.topdir).start()

    repodir = koji.pathinfo.distrepo(self.rinfo['id'], self.rinfo['tag_name'])
    archdir = "%s/%s" % (repodir, koji.canonArch(self.arch))
    os.makedirs(archdir)
    self.uploadpath = 'UNITTEST'
    workdir = koji.pathinfo.work()
    uploaddir = "%s/%s" % (workdir, self.uploadpath)
    os.makedirs(uploaddir)

    # place some test files (content is just the basename)
    self.files = ['drpms/foo.drpm', 'repodata/repomd.xml']
    self.expected = ['x86_64/drpms/foo.drpm', 'x86_64/repodata/repomd.xml']
    for fn in self.files:
        path = os.path.join(uploaddir, fn)
        koji.ensuredir(os.path.dirname(path))
        with open(path, 'w') as fo:
            fo.write('%s' % os.path.basename(fn))

    # generate pkglist file
    self.files.append('pkglist')
    plist = os.path.join(uploaddir, 'pkglist')
    nvrs = ['aaa-1.0-2', 'bbb-3.0-5', 'ccc-8.0-13', 'ddd-21.0-34']
    self.rpms = {}
    self.builds = {}
    self.key = '4c8da725'
    with open(plist, 'w') as f_pkglist:
        for nvr in nvrs:
            binfo = koji.parse_NVR(nvr)
            rpminfo = binfo.copy()
            rpminfo['arch'] = 'x86_64'
            builddir = koji.pathinfo.build(binfo)
            relpath = koji.pathinfo.signed(rpminfo, self.key)
            path = os.path.join(builddir, relpath)
            koji.ensuredir(os.path.dirname(path))
            basename = os.path.basename(path)
            with open(path, 'w') as fo:
                fo.write('%s' % basename)
            f_pkglist.write(path)
            f_pkglist.write('\n')
            # Repo layout buckets packages by first letter of the filename.
            self.expected.append('x86_64/Packages/%s/%s' % (basename[0], basename))
            # Synthesize stable ids so our_get_rpm/our_get_build can look
            # them up: builds from 10000, rpms from 20000.
            build_id = len(self.builds) + 10000
            rpm_id = len(self.rpms) + 20000
            binfo['id'] = build_id
            rpminfo['build_id'] = build_id
            rpminfo['id'] = rpm_id
            rpminfo['sigkey'] = self.key
            rpminfo['size'] = 1024
            rpminfo['payloadhash'] = 'helloworld'
            self.builds[build_id] = binfo
            self.rpms[rpm_id] = rpminfo

    # write kojipkgs: filename -> rpminfo metadata for every test rpm
    kojipkgs = {}
    for rpminfo in self.rpms.values():
        bnp = '%(name)s-%(version)s-%(release)s.%(arch)s.rpm' % rpminfo
        kojipkgs[bnp] = rpminfo
    with open("%s/kojipkgs" % uploaddir, "w") as fp:
        json.dump(kojipkgs, fp, indent=4)
    self.files.append('kojipkgs')

    # write manifest
    with open("%s/repo_manifest" % uploaddir, "w") as fp:
        json.dump(self.files, fp, indent=4)

    # mocks: route kojihub data access to the fixtures above
    self.repo_info = mock.patch('kojihub.repo_info').start()
    self.repo_info.return_value = self.rinfo.copy()
    self.get_rpm = mock.patch('kojihub.get_rpm').start()
    self.get_build = mock.patch('kojihub.get_build').start()
    self.get_rpm.side_effect = self.our_get_rpm
    self.get_build.side_effect = self.our_get_build
def setUp(self):
    """Build a throwaway koji topdir with signed RPMs, a pkglist, kojipkgs
    and a repo manifest, plus kojihub mocks, for the dist-repo tests."""
    self.topdir = tempfile.mkdtemp()
    # Canned repo row as kojihub.repo_info would return it.
    self.rinfo = {
        'create_event': 2915,
        'create_ts': 1487256924.72718,
        'creation_time': '2017-02-16 14:55:24.727181',
        'id': 47,
        'state': 0,  # INIT
        'tag_id': 2,
        'tag_name': 'my-tag'
    }
    self.arch = 'x86_64'

    # set up a fake koji topdir
    # koji.pathinfo._topdir = self.topdir
    mock.patch('koji.pathinfo._topdir', new=self.topdir).start()

    repodir = koji.pathinfo.distrepo(self.rinfo['id'], self.rinfo['tag_name'])
    archdir = "%s/%s" % (repodir, koji.canonArch(self.arch))
    os.makedirs(archdir)
    self.uploadpath = 'UNITTEST'
    workdir = koji.pathinfo.work()
    uploaddir = "%s/%s" % (workdir, self.uploadpath)
    os.makedirs(uploaddir)

    # place some test files (content is just the basename)
    self.files = ['drpms/foo.drpm', 'repodata/repomd.xml']
    self.expected = ['x86_64/drpms/foo.drpm', 'x86_64/repodata/repomd.xml']
    for fn in self.files:
        path = os.path.join(uploaddir, fn)
        koji.ensuredir(os.path.dirname(path))
        with open(path, 'w') as fo:
            fo.write('%s' % os.path.basename(fn))

    # generate pkglist file
    self.files.append('pkglist')
    plist = os.path.join(uploaddir, 'pkglist')
    nvrs = ['aaa-1.0-2', 'bbb-3.0-5', 'ccc-8.0-13', 'ddd-21.0-34']
    self.rpms = {}
    self.builds = {}
    self.key = '4c8da725'
    with open(plist, 'w') as f_pkglist:
        for nvr in nvrs:
            binfo = koji.parse_NVR(nvr)
            rpminfo = binfo.copy()
            rpminfo['arch'] = 'x86_64'
            builddir = koji.pathinfo.build(binfo)
            relpath = koji.pathinfo.signed(rpminfo, self.key)
            path = os.path.join(builddir, relpath)
            koji.ensuredir(os.path.dirname(path))
            basename = os.path.basename(path)
            with open(path, 'w') as fo:
                fo.write('%s' % basename)
            f_pkglist.write(path)
            f_pkglist.write('\n')
            # Repo layout buckets packages by first letter of the filename.
            self.expected.append('x86_64/Packages/%s/%s' % (basename[0], basename))
            # Synthesize stable ids so our_get_rpm/our_get_build can look
            # them up: builds from 10000, rpms from 20000.
            build_id = len(self.builds) + 10000
            rpm_id = len(self.rpms) + 20000
            binfo['id'] = build_id
            rpminfo['build_id'] = build_id
            rpminfo['id'] = rpm_id
            rpminfo['sigkey'] = self.key
            rpminfo['size'] = 1024
            rpminfo['payloadhash'] = 'helloworld'
            self.builds[build_id] = binfo
            self.rpms[rpm_id] = rpminfo

    # write kojipkgs: filename -> rpminfo metadata for every test rpm
    kojipkgs = {}
    for rpminfo in self.rpms.values():
        bnp = '%(name)s-%(version)s-%(release)s.%(arch)s.rpm' % rpminfo
        kojipkgs[bnp] = rpminfo
    with open("%s/kojipkgs" % uploaddir, "w") as fp:
        json.dump(kojipkgs, fp, indent=4)
    self.files.append('kojipkgs')

    # write manifest
    with open("%s/repo_manifest" % uploaddir, "w") as fp:
        json.dump(self.files, fp, indent=4)

    # mocks: route kojihub data access to the fixtures above
    self.repo_info = mock.patch('kojihub.repo_info').start()
    self.repo_info.return_value = self.rinfo.copy()
    self.get_rpm = mock.patch('kojihub.get_rpm').start()
    self.get_build = mock.patch('kojihub.get_build').start()
    self.get_rpm.side_effect = self.our_get_rpm
    self.get_build.side_effect = self.our_get_build
def _prepare_builds(self, db_event, to_rebuild_bundles):
    """
    Prepare models.ArtifactBuild instance for every bundle that will be rebuilt

    :param models.Event db_event: database event that will contain builds
    :param list to_rebuild_bundles: bundles to rebuild
    :return: builds that already in database and ready to be submitted to brew
    :rtype: list
    """
    builds = []
    # URL template for the pullspec-overrides endpoint; filled in with the
    # database build id once the ArtifactBuild row exists.
    csv_mod_url = conf.freshmaker_root_url + "/api/2/pullspec_overrides/{}"
    for bundle in to_rebuild_bundles:
        # Reset context to db_event for each iteration before
        # the ArtifactBuild is created.
        self.set_context(db_event)

        rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value
        bundle_name = koji.parse_NVR(bundle["nvr"])["name"]

        build = self.record_build(db_event, bundle_name, ArtifactType.IMAGE,
                                  state=ArtifactBuildState.PLANNED.value,
                                  original_nvr=bundle["nvr"],
                                  rebuild_reason=rebuild_reason)

        # Set context to particular build so logging shows this build
        # in case of error.
        self.set_context(build)
        build.transition(ArtifactBuildState.PLANNED.value, "")

        # Pull repository/commit/target/... metadata from Koji for the
        # original bundle build; consumed later when submitting to Brew/OSBS.
        additional_data = ContainerImage.get_additional_data_from_koji(
            bundle["nvr"])

        build.build_args = json.dumps({
            "repository": additional_data["repository"],
            "commit": additional_data["commit"],
            "target": additional_data["target"],
            "branch": additional_data["git_branch"],
            "arches": additional_data["arches"],
            # The build system always enforces that bundle images build from
            # "scratch", so there is no parent image. See:
            # https://osbs.readthedocs.io/en/latest/users.html?#operator-manifest-bundle-builds
            "original_parent": None,
            "operator_csv_modifications_url": csv_mod_url.format(build.id),
        })
        build.bundle_pullspec_overrides = {
            "pullspec_replacements": bundle["pullspec_replacements"],
            "update": bundle["update"],
        }

        # Commit per bundle so each prepared build is persisted even if a
        # later iteration fails.
        db.session.commit()
        builds.append(build)
    return builds