def setUp(self):
    """Create four composes and one event with three builds, then link
    builds to composes for the build/compose relation tests."""
    super(TestArtifactBuildComposesRel, self).setUp()

    self.compose_1 = Compose(odcs_compose_id=-1)
    self.compose_2 = Compose(odcs_compose_id=2)
    self.compose_3 = Compose(odcs_compose_id=3)
    self.compose_4 = Compose(odcs_compose_id=4)
    for compose in (self.compose_1, self.compose_2,
                    self.compose_3, self.compose_4):
        db.session.add(compose)

    self.event = Event.create(
        db.session, 'msg-1', 'search-key-1',
        EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],
        state=EventState.INITIALIZED,
        released=False)

    self.build_1 = ArtifactBuild.create(
        db.session, self.event, 'build-1', ArtifactType.IMAGE)
    self.build_1.build_id = 3
    self.build_2 = ArtifactBuild.create(
        db.session, self.event, 'build-2', ArtifactType.IMAGE)
    self.build_2.build_id = -2
    self.build_3 = ArtifactBuild.create(
        db.session, self.event, 'build-3', ArtifactType.IMAGE)
    self.build_3.build_id = None
    db.session.commit()

    # build 1 -> composes 1, 2, 3; build 2 -> composes 2, 4;
    # build 3 has no composes.
    relations = (
        (self.build_1, self.compose_1),
        (self.build_1, self.compose_2),
        (self.build_1, self.compose_3),
        (self.build_2, self.compose_2),
        (self.build_2, self.compose_4),
    )
    for build, compose in relations:
        db.session.add(ArtifactBuildCompose(
            build_id=build.id, compose_id=compose.id))
    db.session.commit()
def prepare_yum_repos_for_rebuilds(self, db_event):
    """Prepare a yum repo for *db_event* and for each of its dependent
    events, attach all resulting composes to every build of *db_event*,
    and return the deduplicated list of repo URLs.

    :param db_event: the database Event whose builds get the composes.
    :return: list of unique repo URLs (order not guaranteed).
    :rtype: list
    """
    repo_urls = []
    db_composes = []

    # The event itself first, then each dependent event.
    for event in [db_event] + list(db_event.find_dependent_events()):
        compose = self.prepare_yum_repo(event)
        db_compose = Compose(odcs_compose_id=compose['id'])
        db.session.add(db_compose)
        db_composes.append(db_compose)
        repo_urls.append(compose['result_repofile'])

    # commit all new composes
    db.session.commit()

    for build in db_event.builds:
        build.add_composes(db.session, db_composes)
    db.session.commit()

    # Remove duplicates from repo_urls.
    return list(set(repo_urls))
def _fake_odcs_new_compose(self, compose_source, tag, packages=None,
                           results=None, builds=None, arches=None):
    """
    Fake odcs.new_compose(...) method used in the dry run mode.

    Logs the arguments and emits fake ODCSComposeStateChangeEvent

    :rtype: dict
    :return: Fake odcs.new_compose dict.
    """
    self.handler.log_info(
        "DRY RUN: Calling fake odcs.new_compose with args: %r",
        (compose_source, tag, packages, results, arches))

    # In the dry-run mode, derive the fake compose id from the lowest
    # compose id already stored in the database, so ids remain unique
    # even between Freshmaker restarts. Clamp to -1 at most, so fake
    # ids are always negative.
    fake_compose_id = min(
        Compose.get_lowest_compose_id(db.session) - 1, -1)

    new_compose = {
        'id': fake_compose_id,
        'result_repofile': "http://localhost/{}.repo".format(
            fake_compose_id),
        'state': COMPOSE_STATES['done'],
        'results': results or ['boot.iso'],
    }
    if builds:
        new_compose['builds'] = builds
    if arches:
        new_compose['arches'] = arches

    # Generate and inject the ODCSComposeStateChangeEvent event.
    event = ODCSComposeStateChangeEvent("fake_compose_msg", new_compose)
    event.dry_run = True
    self.handler.log_info("Injecting fake event: %r", event)
    work_queue_put(event)

    return new_compose
def test_prepare_odcs_compose_with_image_rpms_dry_run(
        self, global_consumer):
    consumer = self.create_consumer()
    global_consumer.return_value = consumer

    image = self._get_fake_container_image()

    # Two passes verify that each fake compose gets a fresh,
    # non-repeating negative id.
    for iteration in (1, 2):
        handler = MyHandler()
        handler.force_dry_run()
        compose = handler.odcs.prepare_odcs_compose_with_image_rpms(image)

        db.session.add(Compose(odcs_compose_id=compose['id']))
        db.session.commit()

        self.assertEqual(-iteration, compose['id'])
        event = consumer.incoming.get()
        self.assertEqual(event.msg_id, "fake_compose_msg")
def _create_test_event(self, event_id, search_key, build_name, compose_id):
    """Create an event containing one planned image build that is linked
    to a freshly stored compose, and return the event.

    :param event_id: message id for the new Event.
    :param search_key: search key for the new Event.
    :param build_name: name of the single ArtifactBuild to create.
    :param compose_id: ODCS compose id for the Compose row.
    :return: the created database Event.
    """
    db_event = Event.create(
        db.session, event_id, search_key,
        EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],
        state=EventState.INITIALIZED, released=False)
    new_build = ArtifactBuild.create(
        db.session, db_event, build_name, ArtifactType.IMAGE,
        state=ArtifactBuildState.PLANNED)
    new_compose = Compose(odcs_compose_id=compose_id)
    db.session.add(new_compose)
    db.session.commit()

    db.session.add(ArtifactBuildCompose(
        build_id=new_build.id, compose_id=new_compose.id))
    db.session.commit()
    return db_event
def setUp(self):
    """Create four composes, one building event with two planned builds
    that share identical build_args, link the first build to all four
    composes, and patch odcs_get_compose with a local fake."""
    super(TestGetRepoURLs, self).setUp()

    self.compose_1 = Compose(odcs_compose_id=5)
    self.compose_2 = Compose(odcs_compose_id=6)
    self.compose_3 = Compose(odcs_compose_id=7)
    self.compose_4 = Compose(odcs_compose_id=8)
    for compose in (self.compose_1, self.compose_2,
                    self.compose_3, self.compose_4):
        db.session.add(compose)

    self.event = Event.create(
        db.session, 'msg-1', 'search-key-1',
        EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],
        state=EventState.BUILDING, released=False)

    # Both builds carry the same serialized build arguments.
    build_args = json.dumps({
        "repository": "repo",
        "commit": "hash",
        "original_parent": None,
        "target": "target",
        "branch": "branch",
        "arches": "x86_64",
        "renewed_odcs_compose_ids": None,
    })

    self.build_1 = ArtifactBuild.create(
        db.session, self.event, 'build-1', ArtifactType.IMAGE,
        state=ArtifactBuildState.PLANNED, original_nvr="foo-1-2")
    self.build_1.build_args = build_args
    self.build_2 = ArtifactBuild.create(
        db.session, self.event, 'build-2', ArtifactType.IMAGE,
        state=ArtifactBuildState.PLANNED, original_nvr="foo-2-2")
    self.build_2.build_args = build_args
    db.session.commit()

    # build 1 is related to every compose; build 2 to none.
    for compose in (self.compose_1, self.compose_2,
                    self.compose_3, self.compose_4):
        db.session.add(ArtifactBuildCompose(
            build_id=self.build_1.id, compose_id=compose.id))
    db.session.commit()

    def mocked_odcs_get_compose(compose_id):
        return {
            "id": compose_id,
            "result_repofile": "http://localhost/%d.repo" % compose_id,
            "state": COMPOSE_STATES["done"],
        }

    self.patch_odcs_get_compose = patch(
        "freshmaker.handlers.ContainerBuildHandler.odcs_get_compose",
        side_effect=mocked_odcs_get_compose)
    self.odcs_get_compose = self.patch_odcs_get_compose.start()
def _record_batches(self, batches, event, builds=None):
    """
    Records the images from batches to database.

    :param batches list: Output of LightBlue._find_images_to_rebuild(...).
    :param event ErrataAdvisoryRPMsSignedEvent: The event this handler
        is currently handling.
    :param builds dict: mappings from docker image build NVR to
        corresponding ArtifactBuild object, e.g.
        ``{brew_build_nvr: ArtifactBuild, ...}``. Previous builds returned
        from this method can be passed to this call to be extended by
        adding a new mappings after docker image is stored into database.
        For the first time to call this method, builds could be None.
    :return: a mapping between docker image build NVR and corresponding
        ArtifactBuild object representing a future rebuild of that docker
        image. It is extended by including those docker images stored
        into database.
    :rtype: dict
    """
    db_event = Event.get_or_create_from_event(db.session, event)

    # Used as tmp dict with {brew_build_nvr: ArtifactBuild, ...} mapping.
    builds = builds or {}
    # Cache for ODCS pulp composes. Key is white-spaced, sorted, list
    # of content_sets. Value is Compose database object.
    odcs_cache = {}

    for batch in batches:
        for image in batch:
            # Reset context to db_event for each iteration before
            # the ArtifactBuild is created.
            self.set_context(db_event)

            nvr = image.nvr
            if nvr in builds:
                self.log_debug(
                    "Skipping recording build %s, "
                    "it is already in db", nvr)
                continue

            # Skip images that another event this one depends on has
            # already built.
            parent_build = db_event.get_artifact_build_from_event_dependencies(
                nvr)
            if parent_build:
                self.log_debug(
                    "Skipping recording build %s, "
                    "it is already built in dependant event %r",
                    nvr, parent_build[0].event_id)
                continue

            self.log_debug("Recording %s", nvr)
            parent_nvr = image["parent"].nvr \
                if "parent" in image and image["parent"] else None
            dep_on = builds[parent_nvr] if parent_nvr in builds else None

            # If the parent was already rebuilt by a dependency event,
            # base this image on the parent's rebuilt NVR instead of
            # depending on a build created in this event.
            if parent_nvr:
                build = db_event.get_artifact_build_from_event_dependencies(
                    parent_nvr)
                if build:
                    parent_nvr = build[0].rebuilt_nvr
                    dep_on = None

            # Determine the initial state the build transitions to below.
            if "error" in image and image["error"]:
                state_reason = image["error"]
                state = ArtifactBuildState.FAILED.value
            elif dep_on and dep_on.state == ArtifactBuildState.FAILED.value:
                # If this artifact build depends on a build which cannot
                # be built by Freshmaker, mark this one as failed too.
                state_reason = "Cannot build artifact, because its " \
                    "dependency cannot be built."
                state = ArtifactBuildState.FAILED.value
            else:
                state_reason = ""
                state = ArtifactBuildState.PLANNED.value

            image_name = koji.parse_NVR(image.nvr)["name"]

            # Only released images are considered as directly affected for
            # rebuild. If some image is not in the latest released version and
            # it is included in a rebuild, it must be just a dependency of
            # other image.
            if image.get('directly_affected'):
                rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value
            else:
                rebuild_reason = RebuildReason.DEPENDENCY.value

            build = self.record_build(
                event, image_name, ArtifactType.IMAGE,
                dep_on=dep_on,
                state=ArtifactBuildState.PLANNED.value,
                original_nvr=nvr,
                rebuild_reason=rebuild_reason)

            # Set context to particular build so logging shows this build
            # in case of error.
            self.set_context(build)

            build.transition(state, state_reason)
            build.build_args = json.dumps({
                "repository": image["repository"],
                "commit": image["commit"],
                "original_parent": parent_nvr,
                "target": image["target"],
                "branch": image["git_branch"],
                "arches": image["arches"],
                "renewed_odcs_compose_ids": image["odcs_compose_ids"],
            })

            db.session.commit()

            if state != ArtifactBuildState.FAILED.value:
                # Store odcs pulp compose to build.
                # Also generate pulp repos in case the image is unpublished,
                # because in this case, we have to generate extra ODCS compose
                # with all the RPMs in the image anyway later. And OSBS works
                # in a way that we have to pass all the ODCS composes to it or
                # no ODCS compose at all.
                if image["generate_pulp_repos"] or not image["published"]:
                    # Check if the compose for these content_sets is
                    # already cached and use it in this case.
                    cache_key = " ".join(sorted(image["content_sets"]))
                    if cache_key in odcs_cache:
                        db_compose = odcs_cache[cache_key]
                    else:
                        compose = self.odcs.prepare_pulp_repo(
                            build, image["content_sets"])

                        if build.state != ArtifactBuildState.FAILED.value:
                            db_compose = Compose(
                                odcs_compose_id=compose['id'])
                            db.session.add(db_compose)
                            db.session.commit()
                            odcs_cache[cache_key] = db_compose
                        else:
                            # prepare_pulp_repo failed the build; do not
                            # cache or attach a compose for it.
                            db_compose = None
                            db.session.commit()
                    if db_compose:
                        build.add_composes(db.session, [db_compose])
                        db.session.commit()

                # Unpublished images can contain unreleased RPMs, so generate
                # the ODCS compose with all the RPMs in the image to allow
                # installation of possibly unreleased RPMs.
                if not image["published"]:
                    compose = self.odcs.prepare_odcs_compose_with_image_rpms(
                        image)
                    if compose:
                        db_compose = Compose(odcs_compose_id=compose['id'])
                        db.session.add(db_compose)
                        db.session.commit()
                        build.add_composes(db.session, [db_compose])
                        db.session.commit()

            builds[nvr] = build

    # Reset context to db_event.
    self.set_context(db_event)

    return builds
def setUp(self):
    """Create one event whose six builds all share a single compose
    (build 6 left in BUILD state, i.e. not planned), plus a second
    unrelated event with its own build and compose."""
    super(TestRebuildImagesOnODCSComposeDone, self).setUp()

    self.db_event = Event.create(
        db.session, 'msg-1', 'search-key-1',
        EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],
        state=EventState.INITIALIZED,
        released=False)

    # build-2 depends on build-1; build-4 and build-5 depend on
    # build-3; build-6 is already building, so it is not planned.
    self.build_1 = ArtifactBuild.create(
        db.session, self.db_event, 'build-1', ArtifactType.IMAGE,
        state=ArtifactBuildState.PLANNED)
    self.build_2 = ArtifactBuild.create(
        db.session, self.db_event, 'build-2', ArtifactType.IMAGE,
        dep_on=self.build_1, state=ArtifactBuildState.PLANNED)
    self.build_3 = ArtifactBuild.create(
        db.session, self.db_event, 'build-3', ArtifactType.IMAGE,
        state=ArtifactBuildState.PLANNED)
    self.build_4 = ArtifactBuild.create(
        db.session, self.db_event, 'build-4', ArtifactType.IMAGE,
        dep_on=self.build_3, state=ArtifactBuildState.PLANNED)
    self.build_5 = ArtifactBuild.create(
        db.session, self.db_event, 'build-5', ArtifactType.IMAGE,
        dep_on=self.build_3, state=ArtifactBuildState.PLANNED)
    self.build_6 = ArtifactBuild.create(
        db.session, self.db_event, 'build-6', ArtifactType.IMAGE,
        state=ArtifactBuildState.BUILD)

    self.compose_1 = Compose(odcs_compose_id=1)
    db.session.add(self.compose_1)
    db.session.commit()

    # Every build of the event shares compose 1.
    for build in (self.build_1, self.build_2, self.build_3,
                  self.build_4, self.build_5, self.build_6):
        db.session.add(ArtifactBuildCompose(
            build_id=build.id, compose_id=self.compose_1.id))
    db.session.commit()

    # Create another DB event, build and compose just to have more data
    # in database.
    other_event = Event.create(
        db.session, 'msg-2', 'search-key-2',
        EVENT_TYPES[ErrataAdvisoryRPMsSignedEvent],
        state=EventState.INITIALIZED,
        released=False)
    other_build = ArtifactBuild.create(
        db.session, other_event, 'another-build-1', ArtifactType.IMAGE,
        state=ArtifactBuildState.PLANNED)
    other_compose = Compose(odcs_compose_id=2)
    db.session.add(other_compose)
    db.session.commit()

    db.session.add(ArtifactBuildCompose(
        build_id=other_build.id, compose_id=other_compose.id))
    db.session.commit()
def test_get_lowest_compose_id(self):
    """The fixture's lowest stored ODCS compose id should be returned."""
    lowest = Compose.get_lowest_compose_id(db.session)
    self.assertEqual(lowest, -1)