def test_transform_from_done_to_ready(self, ClientSession, publish): clean_database() # This build should be queried and transformed to ready state module_build = make_module_in_db( "pkg:0.1:1:c1", [{ "requires": { "platform": ["el8"] }, "buildrequires": { "platform": ["el8"] }, }], ) module_build.transition(db_session, conf, BUILD_STATES["done"], "Move to done directly for running test.") db_session.commit() # Assert this call below first_publish_call = call( "module.state.change", module_build.json(db_session, show_tasks=False), conf, "mbs", ) ClientSession.return_value.getBuild.return_value = { "extra": { "typeinfo": { "module": { "module_build_service_id": module_build.id } } } } msg = { "msg_id": "msg-id-1", "topic": "org.fedoraproject.prod.greenwave.decision.update", "msg": { "decision_context": "test_dec_context", "policies_satisfied": True, "subject_identifier": "pkg-0.1-1.c1", }, } hub = Mock(config={"validate_signatures": False}) consumer = MBSConsumer(hub) consumer.consume(msg) db_session.add(module_build) # Load module build again to check its state is moved correctly db_session.refresh(module_build) assert BUILD_STATES["ready"] == module_build.state publish.assert_has_calls([ first_publish_call, call("module.state.change", module_build.json(db_session, show_tasks=False), conf, "mbs"), ])
def test_a_single_match_finalize(self, connect, build_fn, get_session, ready,
                                 list_tasks_fn, mock_gabt, mock_uea, finalizer):
    """ Test that when a repo msg hits us and we have a single match.

    NOTE(review): the mock parameters presumably come from mock.patch
    decorators outside this view — confirm against the test class.
    """
    scheduler_init_data(tangerine_state=1)
    get_session.return_value = mock.Mock(), "development"
    build_fn.return_value = 1234, 1, "", None

    # Ensure the time_completed is None, so we can test it is set to
    # some date once the build is finalized.
    module_build = module_build_service.common.models.ModuleBuild.get_by_id(
        db_session, 2)
    module_build.time_completed = None
    db_session.commit()

    def mocked_finalizer(succeeded=None):
        # Check that the time_completed is set in the time when
        # finalizer is called.
        assert succeeded is True
        module_build = module_build_service.common.models.ModuleBuild.get_by_id(
            db_session, 2)
        assert module_build.time_completed is not None

    finalizer.side_effect = mocked_finalizer

    # Deliver the repo "done" event for the module's build tag.
    module_build_service.scheduler.handlers.repos.done(
        msg_id="some_msg_id",
        tag_name="module-testmodule-master-20170109091357-7c29193d-build")

    finalizer.assert_called_once()
def test_get_rpm_release_metadata_br_stream_override(mock_admmn):
    """
    Test that when a module buildrequires a module in conf.allowed_privileged_module_names,
    and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag.
    """
    scheduler_init_data(1)
    metadata_mmd = load_mmd(read_staged_data("build_metadata_module"))
    import_mmd(db_session, metadata_mmd)

    build_one = models.ModuleBuild.get_by_id(db_session, 2)
    mmd = build_one.mmd()
    # Buildrequire the privileged "build" module stream ...
    deps = mmd.get_dependencies()[0]
    deps.add_buildtime_stream("build", "product1.2")
    # ... and record the resolved buildrequire in xmd the way the resolver would.
    xmd = mmd.get_xmd()
    xmd["mbs"]["buildrequires"]["build"] = {
        "filtered_rpms": [],
        "ref": "virtual",
        "stream": "product1.2",
        "version": "1",
        "context": "00000000",
    }
    mmd.set_xmd(xmd)
    build_one.modulemd = mmd_to_str(mmd)
    db_session.add(build_one)
    db_session.commit()

    release = utils.get_rpm_release(db_session, build_one)
    # The disttag_marking ("product12") from the privileged module shows up
    # in the computed release instead of the plain module marking.
    assert release == "module+product12+2+814cfa39"
def nudge_module_builds_in_state(state_name, older_than_minutes):
    """
    Finds all the module builds in the `state` with `time_modified` older
    than `older_than_minutes` and adds fake MBSModule message to the work
    queue.
    """
    log.info("Looking for module builds stuck in the %s state", state_name)
    builds = models.ModuleBuild.by_state(db_session, state_name)
    log.info(" %r module builds in the %s state...", len(builds), state_name)

    now = datetime.utcnow()
    threshold = timedelta(minutes=older_than_minutes)

    # Resolve the handler once; it is the same for every build in this state.
    state = module_build_service.common.models.BUILD_STATES[state_name]
    handler = ON_MODULE_CHANGE_HANDLERS[state]

    for build in builds:
        # Only nudge builds that have been stuck past the configured threshold.
        if now - build.time_modified < threshold:
            continue

        # Pretend the build is modified, so we don't tight spin.
        build.time_modified = now
        db_session.commit()

        # Fake a message to kickstart the build anew in the consumer
        handler.delay("internal:mbs.module.state.change", build.id, state)
def truncate_tables():
    """Much cheaper operation (up to 2/3 faster) than clean_database (DROP/CREATE)"""
    db_session.remove()
    db_session.configure(bind=db.session.get_bind())

    # Empty every table, children first, without touching the schema.
    for table in reversed(db.metadata.sorted_tables):
        db_session.execute(table.delete())

    if db_session.bind.dialect.name == "postgresql":
        # POSTGRES ONLY (!)
        # Tests reference test data by IDs, assuming they always start from 1.
        # In psql, sequences are created for models' IDs - they need to be reset.
        sequences = (
            "component_builds_id_seq",
            "component_builds_trace_id_seq",
            "log_messages_id_seq",
            "module_arches_id_seq",
            "module_builds_id_seq",
            "module_builds_trace_id_seq",
            "virtual_streams_id_seq",
        )
        db_session.execute(
            "".join("alter sequence {} restart with 1;".format(s) for s in sequences)
        )

    db_session.commit()
def start_build_component(db_session, builder, c):
    """
    Submits single component build to builder. Called in thread
    by QueueBasedThreadPool in continue_batch_build.

    This function runs inside separate threads that share one SQLAlchemy
    session object to update a module build state once there is something
    wrong when one of its components is submitted to Koji to build.

    :param db_session: SQLAlchemy session shared by the worker threads.
    :param builder: builder instance used to submit the component build.
    :param c: the ComponentBuild model instance to submit.
    """
    import koji

    def _fail_module_build():
        # Every other transition() call in this code base passes the session
        # first (see process_message); the original omitted it here.
        # The lock serializes access to the shared session across threads.
        with BUILD_COMPONENT_DB_SESSION_LOCK:
            c.module_build.transition(
                db_session, conf, models.BUILD_STATES["failed"], failure_type="infra")
            db_session.commit()

    try:
        c.task_id, c.state, c.state_reason, c.nvr = builder.build(
            artifact_name=c.package, source=c.scmurl)
    except Exception as e:
        c.state = koji.BUILD_STATES["FAILED"]
        c.state_reason = "Failed to build artifact %s: %s" % (c.package, str(e))
        log.exception(e)
        _fail_module_build()
        return

    if not c.task_id and c.is_building:
        # The builder accepted the submission but gave us no task to track.
        c.state = koji.BUILD_STATES["FAILED"]
        c.state_reason = "Failed to build artifact %s: Builder did not return task ID" % (c.package)
        _fail_module_build()
        return
def test_retire_build(self, prompt_bool, overrides, identifier, changed_count):
    """Retire builds matching `identifier`; exactly `changed_count` turn "garbage".

    NOTE(review): overrides/identifier/changed_count presumably come from
    pytest.mark.parametrize outside this view — confirm against the test class.
    """
    prompt_bool.return_value = True

    module_builds = (db_session.query(ModuleBuild).filter_by(
        state=BUILD_STATES["ready"]).order_by(ModuleBuild.id.desc()).all())
    # Verify our assumption of the amount of ModuleBuilds in database
    assert len(module_builds) == 3

    # Make the ready builds identical except for context ...
    for x, build in enumerate(module_builds):
        build.name = "spam"
        build.stream = "eggs"
        build.version = "ham"
        build.context = str(x)

    # ... then apply the parametrized overrides to the newest one.
    for attr, value in overrides.items():
        setattr(module_builds[0], attr, value)

    db_session.commit()

    retire(identifier)
    retired_module_builds = (db_session.query(ModuleBuild).filter_by(
        state=BUILD_STATES["garbage"]).order_by(
        ModuleBuild.id.desc()).all())

    assert len(retired_module_builds) == changed_count
    for x in range(changed_count):
        assert retired_module_builds[x].id == module_builds[x].id
        assert retired_module_builds[x].state == BUILD_STATES["garbage"]
def test_get_reusable_component_different_arch_in_batch(
        self, changed_component, reuse_component):
    """
    Test that we get the correct reuse behavior for the changed-and-after
    strategy. Changes to the architectures in earlier batches should prevent
    reuse, but such changes to later batches should not.
    For context, see https://pagure.io/fm-orchestrator/issue/1298
    """
    if changed_component == reuse_component:
        # we're only testing the cases where these are different
        # this case is already covered by test_get_reusable_component_different_arches
        return

    second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

    # update arch for changed component
    mmd = second_module_build.mmd()
    component = mmd.get_rpm_component(changed_component)
    component.reset_arches()
    component.add_restricted_arch("i686")
    second_module_build.modulemd = mmd_to_str(mmd)
    db_session.commit()

    # Resolve both names to their ComponentBuild rows so batches can be compared.
    changed_component = models.ComponentBuild.from_component_name(
        db_session, changed_component, second_module_build.id)
    reuse_component = models.ComponentBuild.from_component_name(
        db_session, reuse_component, second_module_build.id)

    reuse_result = get_reusable_component(second_module_build, reuse_component.package)
    # Changing the arch of a component should prevent reuse only when the changed component
    # is in a batch earlier than the component being considered for reuse.
    assert bool(reuse_result is None) == bool(
        reuse_component.batch > changed_component.batch)
def test_get_buildrequired_modulemds(self):
    """The DB resolver returns only modules buildrequiring the given platform."""
    # Import a platform:f30.1.3 stream to buildrequire against.
    mmd = load_mmd(tests.read_staged_data("platform"))
    mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
    import_mmd(db_session, mmd)
    platform_f300103 = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()

    # Create a ready (state=5) testmodule build tied to that platform.
    mmd = tests.make_module("testmodule:master:20170109091357:123")
    build = ModuleBuild(
        name="testmodule",
        stream="master",
        version=20170109091357,
        state=5,
        build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",
        runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",
        context="7c29193d",
        koji_tag="module-testmodule-master-20170109091357-7c29193d",
        scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
        batch=3,
        owner="Dr. Pepper",
        time_submitted=datetime(2018, 11, 15, 16, 8, 18),
        time_modified=datetime(2018, 11, 15, 16, 19, 35),
        rebuild_strategy="changed-and-after",
        modulemd=mmd_to_str(mmd),
    )
    build.buildrequires.append(platform_f300103)

    db_session.add(build)
    db_session.commit()

    resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="db")
    result = resolver.get_buildrequired_modulemds(
        "testmodule", "master", platform_f300103.mmd())
    nsvcs = {m.get_nsvc() for m in result}
    assert nsvcs == {"testmodule:master:20170109091357:123"}
def test_get_reusable_component_different_batch(self, changed_component, reuse_component):
    """
    Test that we get the correct reuse behavior for the changed-and-after
    strategy. Changes to earlier batches should prevent reuse, but changes to
    later batches should not.
    For context, see https://pagure.io/fm-orchestrator/issue/1298
    """
    if changed_component == reuse_component:
        # we're only testing the cases where these are different
        # this case is already covered by test_get_reusable_component_different_component
        return

    second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

    # update batch for changed component
    changed_component = models.ComponentBuild.from_component_name(
        db_session, changed_component, second_module_build.id)
    orig_batch = changed_component.batch
    changed_component.batch = orig_batch + 1
    db_session.commit()

    reuse_component = models.ComponentBuild.from_component_name(
        db_session, reuse_component, second_module_build.id)

    reuse_result = get_reusable_component(second_module_build, reuse_component.package)
    # Component reuse should only be blocked when an earlier batch has been changed.
    # In this case, orig_batch is the earliest batch that has been changed (the changed
    # component has been removed from it and added to the following one).
    assert bool(reuse_result is None) == bool(
        reuse_component.batch > orig_batch)
def test_only_delete_build_target_with_allowed_koji_tag_prefix( self, ClientSession, create_builder, dbg ): module_build_2 = models.ModuleBuild.get_by_id(db_session, 2) # Only module build 1's build target should be deleted. module_build_2.koji_tag = "module-tag1" module_build_2.state = models.BUILD_STATES["done"] # Ensure to exceed the koji_target_delete_time easily later for deletion module_build_2.time_completed = datetime.utcnow() - timedelta(hours=24) module_build_3 = models.ModuleBuild.get_by_id(db_session, 3) module_build_3.koji_tag = "f28" db_session.commit() db_session.refresh(module_build_2) db_session.refresh(module_build_3) koji_session = ClientSession.return_value # No created module build has any of these tags. koji_session.getBuildTargets.return_value = [ {"id": 1, "dest_tag_name": module_build_2.koji_tag, "name": module_build_2.koji_tag}, {"id": 2, "dest_tag_name": module_build_3.koji_tag, "name": module_build_3.koji_tag}, ] with patch.object(conf, "koji_tag_prefixes", new=["module", "another-prefix"]): with patch.object(conf, "koji_target_delete_time", new=60): producer.delete_old_koji_targets() koji_session.deleteBuildTarget.assert_called_once_with(1) koji_session.krb_login.assert_called_once()
def test_trigger_new_repo_when_succeeded(self, ClientSession, create_builder, dbg):
    """
    Tests that we do not call koji_session.newRepo when newRepo task
    succeeded.
    """
    koji_session = ClientSession.return_value
    koji_session.getTag = lambda tag_name: {"name": tag_name}
    # The previous newRepo task finished successfully (CLOSED).
    koji_session.getTaskInfo.return_value = {"state": koji.TASK_STATES["CLOSED"]}
    koji_session.newRepo.return_value = 123456

    builder = mock.MagicMock()
    builder.buildroot_ready.return_value = False
    create_builder.return_value = builder

    # Change the batch to 2, so the module build is in state where
    # it is not building anything, but the state is "build".
    module_build = models.ModuleBuild.get_by_id(db_session, 3)
    module_build.batch = 2
    module_build.new_repo_task_id = 123456
    db_session.commit()

    producer.retrigger_new_repo_on_failure()

    module_build = models.ModuleBuild.get_by_id(db_session, 3)

    # A successful task must not be retriggered, and the task id stays put.
    assert not koji_session.newRepo.called
    assert module_build.new_repo_task_id == 123456
def test_get_reusable_component_different_buildrequires_stream(
        self, rebuild_strategy):
    """A different platform buildrequire stream must block all component reuse.

    NOTE(review): rebuild_strategy presumably comes from pytest.mark.parametrize
    outside this view — confirm against the test class.
    """
    first_module_build = models.ModuleBuild.get_by_id(db_session, 2)
    first_module_build.rebuild_strategy = rebuild_strategy
    db_session.commit()

    # Point the second build at a different platform stream, both in the
    # dependencies and in the resolved xmd buildrequires.
    second_module_build = models.ModuleBuild.get_by_id(db_session, 3)
    mmd = second_module_build.mmd()
    xmd = mmd.get_xmd()
    xmd["mbs"]["buildrequires"]["platform"]["stream"] = "different"
    deps = Modulemd.Dependencies()
    deps.add_buildtime_stream("platform", "different")
    deps.add_runtime_stream("platform", "different")
    mmd.clear_dependencies()
    mmd.add_dependencies(deps)

    mmd.set_xmd(xmd)
    second_module_build.modulemd = mmd_to_str(mmd)
    # Recompute the build context to reflect the changed buildrequires.
    second_module_build.build_context = \
        models.ModuleBuild.contexts_from_mmd(second_module_build.modulemd).build_context
    second_module_build.rebuild_strategy = rebuild_strategy
    db_session.commit()

    plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")
    pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")
    tangerine_rv = get_reusable_component(second_module_build, "tangerine")

    # No component should be reused, regardless of the rebuild strategy.
    assert plc_rv is None
    assert pt_rv is None
    assert tangerine_rv is None
def test_add_default_modules_request_failed(mock_get_dm):
    """
    Test that an exception is raised when the call to _get_default_modules failed.
    """
    clean_database()
    make_module_in_db("python:3:12345:1")
    make_module_in_db("nodejs:11:2345:2")
    mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))
    xmd_brs = mmd.get_xmd()["mbs"]["buildrequires"]
    assert set(xmd_brs.keys()) == {"platform"}

    platform = ModuleBuild.get_build_from_nsvc(
        db_session,
        "platform",
        xmd_brs["platform"]["stream"],
        xmd_brs["platform"]["version"],
        xmd_brs["platform"]["context"],
    )
    assert platform
    platform_mmd = platform.mmd()
    # Flag the platform module so add_default_modules() attempts to fetch the
    # default modules (which is mocked to fail below).
    platform_xmd = mmd.get_xmd()
    platform_xmd["mbs"]["use_default_modules"] = True
    platform_mmd.set_xmd(platform_xmd)
    platform.modulemd = mmd_to_str(platform_mmd)
    db_session.commit()

    expected_error = "some error"
    mock_get_dm.side_effect = ValueError(expected_error)
    # The error from _get_default_modules must propagate unchanged.
    with pytest.raises(ValueError, match=expected_error):
        default_modules.add_default_modules(mmd)
def test_process_waiting_module_build(self, create_builder, dbg, state):
    """ Test that processing old waiting module builds works. """

    handler = producer.ON_MODULE_CHANGE_HANDLERS[models.BUILD_STATES[state]]

    # Change the batch to 2, so the module build is in state where
    # it is not building anything, but the state is "build".
    module_build = models.ModuleBuild.get_by_id(db_session, 3)
    module_build.state = models.BUILD_STATES[state]
    # Backdate time_modified past the stuck-build threshold so the producer
    # picks this build up.
    original = datetime.utcnow() - timedelta(minutes=11)
    module_build.time_modified = original

    db_session.commit()
    db_session.refresh(module_build)

    # Poll :)
    producer.process_waiting_module_builds()

    # The nudge must re-emit a fake state-change message for this build.
    handler.delay.assert_called_once_with(
        "internal:mbs.module.state.change", module_build.id, module_build.state
    )

    db_session.refresh(module_build)
    # ensure the time_modified was changed.
    assert module_build.time_modified > original
def test_get_reusable_component_different_buildrequires(self):
    """A different set of buildrequires must block all component reuse."""
    second_module_build = models.ModuleBuild.get_by_id(db_session, 3)
    mmd = second_module_build.mmd()
    # Add an extra buildrequired module and replace the resolved xmd record.
    mmd.get_dependencies()[0].add_buildtime_stream("some_module", "master")
    xmd = mmd.get_xmd()
    xmd["mbs"]["buildrequires"] = {
        "some_module": {
            "ref": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
            "stream": "master",
            "version": "20170123140147",
        }
    }
    mmd.set_xmd(xmd)
    second_module_build.modulemd = mmd_to_str(mmd)
    # Recompute the build context so it reflects the new buildrequires.
    second_module_build.build_context = models.ModuleBuild.calculate_build_context(
        xmd["mbs"]["buildrequires"])
    db_session.commit()

    plc_rv = get_reusable_component(second_module_build, "perl-List-Compare")
    assert plc_rv is None

    pt_rv = get_reusable_component(second_module_build, "perl-Tangerine")
    assert pt_rv is None

    tangerine_rv = get_reusable_component(second_module_build, "tangerine")
    assert tangerine_rv is None
def test_get_reusable_component_different_arches(self, set_database_arch, set_current_arch):
    """Reuse is blocked exactly when the arches differ between the two builds.

    NOTE(review): set_database_arch/set_current_arch presumably come from
    pytest.mark.parametrize outside this view — confirm against the test class.
    """
    second_module_build = models.ModuleBuild.get_by_id(db_session, 3)

    if set_current_arch:
        # set architecture for current build
        mmd = second_module_build.mmd()
        component = mmd.get_rpm_component("tangerine")
        component.reset_arches()
        component.add_restricted_arch("i686")
        second_module_build.modulemd = mmd_to_str(mmd)
        db_session.commit()

    if set_database_arch:
        # set architecture for build in database
        second_module_changed_component = models.ComponentBuild.from_component_name(
            db_session, "tangerine", 2)
        mmd = second_module_changed_component.module_build.mmd()
        component = mmd.get_rpm_component("tangerine")
        component.reset_arches()
        component.add_restricted_arch("i686")
        second_module_changed_component.module_build.modulemd = mmd_to_str(
            mmd)
        db_session.commit()

    tangerine = get_reusable_component(second_module_build, "tangerine")
    # Reuse happens (result not None) only when both sides agree on the arch.
    assert bool(tangerine is None) != bool(
        set_current_arch == set_database_arch)
def retrigger_new_repo_on_failure():
    """
    Retrigger failed new repo tasks for module builds in the build state.

    The newRepo task may fail for various reasons outside the scope of MBS.
    This method will detect this scenario and retrigger the newRepo task
    if needed to avoid the module build from being stuck in the "build" state.
    """
    if conf.system != "koji":
        return

    koji_session = get_session(conf)

    # Builds currently in "build" that are tracking a newRepo task.
    builds_with_repo_task = db_session.query(models.ModuleBuild).filter(
        models.ModuleBuild.state == models.BUILD_STATES["build"],
        models.ModuleBuild.new_repo_task_id.isnot(None),
    ).all()

    failed_states = (koji.TASK_STATES["CANCELED"], koji.TASK_STATES["FAILED"])

    for build in builds_with_repo_task:
        task_info = koji_session.getTaskInfo(build.new_repo_task_id)
        if task_info["state"] not in failed_states:
            continue

        log.info(
            "newRepo task %s for %r failed, starting another one",
            str(build.new_repo_task_id),
            build,
        )
        # Resolve the build tag and submit a fresh newRepo task for it.
        taginfo = koji_session.getTag(build.koji_tag + "-build")
        build.new_repo_task_id = koji_session.newRepo(taginfo["name"])

    db_session.commit()
def test_tagging_already_tagged_artifacts(self, blocklist, mock_get_session):
    """
    Tests that buildroot_add_artifacts and tag_artifacts do not try to
    tag already tagged artifacts
    """
    module_build = module_build_service.common.models.ModuleBuild.get_by_id(
        db_session, 2)

    if blocklist:
        # Mark the packages as blocked so tagging must unblock them first.
        mmd = module_build.mmd()
        xmd = mmd.get_xmd()
        xmd["mbs_options"] = {"blocked_packages": ["foo", "bar", "new"]}
        mmd.set_xmd(xmd)
        module_build.modulemd = mmd_to_str(mmd)
        db_session.commit()

    builder = FakeKojiModuleBuilder(
        db_session=db_session,
        owner=module_build.owner,
        module=module_build,
        config=conf,
        tag_name="module-nginx-1.2",
        components=[],
    )
    builder.module_tag = {"name": "module-foo", "id": 1}
    builder.module_build_tag = {"name": "module-foo-build", "id": 2}

    # Set listTagged to return test data
    tagged = [{
        "nvr": "foo-1.0-1.module_e0095747"
    }, {
        "nvr": "bar-1.0-1.module_e0095747"
    }]
    builder.koji_session.listTagged.return_value = tagged

    # Try to tag one artifact which is already tagged and one new ...
    to_tag = ["foo-1.0-1.module_e0095747", "new-1.0-1.module_e0095747"]
    builder.buildroot_add_artifacts(to_tag)

    if blocklist:
        # "foo" and "new" packages should be unblocked before tagging.
        expected_calls = [
            mock.call("module-foo-build", "foo"),
            mock.call("module-foo-build", "new"),
        ]
    else:
        expected_calls = []
    assert builder.koji_session.packageListUnblock.mock_calls == expected_calls

    # ... only new one should be added.
    builder.koji_session.tagBuild.assert_called_once_with(
        builder.module_build_tag["id"], "new-1.0-1.module_e0095747")

    # Try the same for tag_artifacts(...).
    builder.koji_session.tagBuild.reset_mock()
    builder.tag_artifacts(to_tag)
    builder.koji_session.tagBuild.assert_called_once_with(
        builder.module_tag["id"], "new-1.0-1.module_e0095747")
def test_cant_delete_build_target_if_not_reach_delete_time( self, ClientSession, create_builder, dbg ): module_build_2 = models.ModuleBuild.get_by_id(db_session, 2) # Only module build 1's build target should be deleted. module_build_2.koji_tag = "module-tag1" module_build_2.state = models.BUILD_STATES["done"] # Ensure to exceed the koji_target_delete_time easily later for deletion module_build_2.time_completed = datetime.utcnow() - timedelta(minutes=5) db_session.commit() db_session.refresh(module_build_2) koji_session = ClientSession.return_value # No created module build has any of these tags. koji_session.getBuildTargets.return_value = [ {"id": 1, "dest_tag_name": module_build_2.koji_tag, "name": module_build_2.koji_tag} ] with patch.object(conf, "koji_tag_prefixes", new=["module"]): # Use default koji_target_delete_time in config. That time is long # enough for test. producer.delete_old_koji_targets() koji_session.deleteBuildTarget.assert_not_called()
def test_resolve_requires_siblings(self, require_platform_and_default_arch):
    """resolve_requires() returns only the requested context among sibling builds."""
    resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="db")
    mmd = load_mmd(tests.read_staged_data("formatted_testmodule"))
    # Create three sibling builds that differ only in their contexts.
    for i in range(3):
        build = tests.module_build_from_modulemd(mmd_to_str(mmd))
        build.context = "f6e2ae" + str(i)
        build.build_context = "f6e2aeec7576196241b9afa0b6b22acf2b6873d" + str(
            i)
        build.runtime_context = "bbc84c7b817ab3dd54916c0bcd6c6bdf512f7f9c" + str(
            i)
        build.state = models.BUILD_STATES["ready"]
        db_session.add(build)
    db_session.commit()

    build_one = ModuleBuild.get_by_id(db_session, 2)
    nsvc = ":".join([
        build_one.name, build_one.stream, build_one.version, build_one.context
    ])
    result = resolver.resolve_requires([nsvc])
    # Only the exact requested context is returned, not its siblings.
    assert result == {
        "testmodule": {
            "stream": build_one.stream,
            "version": build_one.version,
            "context": build_one.context,
            "ref": "65a7721ee4eff44d2a63fb8f3a8da6e944ab7f4d",
            "koji_tag": None
        }
    }

    db_session.commit()
def test_get_module_build_dependencies_recursive( self, reuse_component_init_data): """ Tests that the buildrequires are returned when it is two layers deep """ # Add testmodule2 that requires testmodule module = models.ModuleBuild.get_by_id(db_session, 3) mmd = module.mmd() # Rename the module mmd = mmd.copy("testmodule2") mmd.set_version(20180123171545) deps = Modulemd.Dependencies() deps.add_runtime_stream("testmodule", "master") mmd.add_dependencies(deps) xmd = mmd.get_xmd() xmd["mbs"]["requires"]["testmodule"] = { "filtered_rpms": [], "ref": "620ec77321b2ea7b0d67d82992dda3e1d67055b4", "stream": "master", "version": "20180205135154", } mmd.set_xmd(xmd) module.modulemd = mmd_to_str(mmd) module.name = "testmodule2" module.version = str(mmd.get_version()) module.koji_tag = "module-ae2adf69caf0e1b6" db_session.commit() resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="db") result = resolver.get_module_build_dependencies( "testmodule2", "master", "20180123171545", "c40c156c").keys() assert set(result) == {"module-f28-build"}
def test_cleanup_stale_failed_builds_no_components(self, create_builder, dbg):
    """ Test that a module build without any components built gets to the garbage
    state when running cleanup_stale_failed_builds.
    """
    module_build_one = models.ModuleBuild.get_by_id(db_session, 1)
    module_build_one.state = models.BUILD_STATES["failed"]
    # Freshly modified — must NOT be garbage collected.
    module_build_one.time_modified = datetime.utcnow()
    module_build_two = models.ModuleBuild.get_by_id(db_session, 2)
    module_build_two.state = models.BUILD_STATES["failed"]
    # Older than the cleanup threshold — must be garbage collected.
    module_build_two.time_modified = datetime.utcnow() - timedelta(
        days=conf.cleanup_failed_builds_time + 1)
    module_build_two.koji_tag = None
    module_build_two.cg_build_koji_tag = None
    # No component has been built at all.
    for c in module_build_two.component_builds:
        c.state = None

    db_session.commit()

    producer.cleanup_stale_failed_builds()
    db_session.refresh(module_build_two)

    # Make sure module_build_two was transitioned to garbage
    assert module_build_two.state == models.BUILD_STATES["garbage"]
    state_reason = (
        "The module was garbage collected since it has failed over {0} day(s) ago"
        .format(conf.cleanup_failed_builds_time)
    )
    assert module_build_two.state_reason == state_reason

    # Make sure module_build_one stayed the same
    assert module_build_one.state == models.BUILD_STATES["failed"]

    # Make sure that the builder was never instantiated
    create_builder.assert_not_called()
def require_platform_and_default_arch_cls(require_empty_database_cls):
    """Provides clean database with platform module and default arch"""
    # Register the default architecture first ...
    db_session.add(module_build_service.common.models.ModuleArch(name="x86_64"))
    db_session.commit()
    # ... then import the staged platform module definition.
    import_mmd(db_session, load_mmd(read_staged_data("platform")))
def test_add_default_modules(mock_get_dm, mock_hc, require_platform_and_default_arch):
    """
    Test that default modules present in the database are added, and the others are ignored.
    """
    mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))
    xmd_brs = mmd.get_xmd()["mbs"]["buildrequires"]
    assert set(xmd_brs.keys()) == {"platform"}

    platform = ModuleBuild.get_build_from_nsvc(
        db_session,
        "platform",
        xmd_brs["platform"]["stream"],
        xmd_brs["platform"]["version"],
        xmd_brs["platform"]["context"],
    )
    assert platform
    platform_mmd = platform.mmd()
    # Flag the platform module so add_default_modules() fetches its defaults.
    platform_xmd = mmd.get_xmd()
    platform_xmd["mbs"]["use_default_modules"] = True
    platform_mmd.set_xmd(platform_xmd)
    platform.modulemd = mmd_to_str(platform_mmd)

    dependencies = [{
        "requires": {"platform": ["f28"]},
        "buildrequires": {"platform": ["f28"]}
    }]
    # Only python and nodejs exist in the database; ruby does not.
    make_module_in_db("python:3:12345:1", base_module=platform, dependencies=dependencies)
    make_module_in_db("nodejs:11:2345:2", base_module=platform, dependencies=dependencies)
    db_session.commit()

    mock_get_dm.return_value = {
        "nodejs": "11",
        "python": "3",
        "ruby": "2.6",
    }
    defaults_added = default_modules.add_default_modules(mmd)

    # Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in
    # the database
    assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {
        "nodejs", "platform", "python"
    }
    mock_get_dm.assert_called_once_with(
        "f28",
        "https://pagure.io/releng/fedora-module-defaults.git",
    )
    assert "ursine_rpms" not in mmd.get_xmd()["mbs"]
    assert defaults_added is True
def test_get_built_rpms_in_module_build(self, ClientSession):
    """get_built_rpms_in_module_build returns NEVRA strings for tagged RPMs."""
    session = ClientSession.return_value
    # listTaggedRPMS returns (rpms, builds); only the rpms list matters here.
    session.listTaggedRPMS.return_value = (
        [
            {
                "build_id": 735939,
                "name": "tar",
                "extra": None,
                "arch": "ppc64le",
                "buildtime": 1533299221,
                "id": 6021394,
                "epoch": 2,
                "version": "1.30",
                "metadata_only": False,
                "release": "4.el8+1308+551bfa71",
                "buildroot_id": 4321122,
                "payloadhash": "0621ab2091256d21c47dcac868e7fc2a",
                "size": 878684,
            },
            {
                "build_id": 735939,
                "name": "bar",
                "extra": None,
                "arch": "ppc64le",
                "buildtime": 1533299221,
                "id": 6021394,
                "epoch": 2,
                "version": "1.30",
                "metadata_only": False,
                "release": "4.el8+1308+551bfa71",
                "buildroot_id": 4321122,
                "payloadhash": "0621ab2091256d21c47dcac868e7fc2a",
                "size": 878684,
            },
        ],
        [],
    )

    module_build = module_build_service.common.models.ModuleBuild.get_by_id(
        db_session, 2)

    # Module builds generated by init_data uses generic modulemd file and
    # the module's name/stream/version/context does not have to match it.
    # But for this test, we need it to match.
    mmd = module_build.mmd()
    module_build.name = mmd.get_module_name()
    module_build.stream = mmd.get_stream_name()
    module_build.version = mmd.get_version()
    module_build.context = mmd.get_context()
    db_session.commit()

    ret = KojiModuleBuilder.get_built_rpms_in_module_build(mmd)
    # N-E:V-R strings, deduplicated as a set.
    assert set(ret) == {
        "bar-2:1.30-4.el8+1308+551bfa71", "tar-2:1.30-4.el8+1308+551bfa71"
    }
    session.assert_not_called()
def test_recover_orphaned_artifact_when_untagged(self, mock_get_session):
    """ Tests recover_orphaned_artifact when the build is found but untagged
    """
    module_build = module_build_service.common.models.ModuleBuild.get_by_id(
        db_session, 2)
    builder = FakeKojiModuleBuilder(
        db_session=db_session,
        owner=module_build.owner,
        module=module_build,
        config=conf,
        tag_name="module-foo",
        components=[],
    )
    builder.module_tag = {"name": "module-foo", "id": 1}
    builder.module_build_tag = {"name": "module-foo-build", "id": 2}
    dist_tag = "module+2+b8661ee4"

    # Set listTagged to return test data
    builder.koji_session.listTagged.side_effect = [[], [], []]
    # The artifact exists in Koji, but is only found among untagged builds.
    untagged = [{
        "id": 9000,
        "name": "foo",
        "version": "1.0",
        "release": "1.{0}".format(dist_tag)
    }]
    builder.koji_session.untaggedBuilds.return_value = untagged
    build_info = {
        "nvr": "foo-1.0-1.{0}".format(dist_tag),
        "task_id": 12345,
        "build_id": 91
    }
    builder.koji_session.getBuild.return_value = build_info

    # Pick a component of another module build that has no recorded state yet.
    module_build = module_build_service.common.models.ModuleBuild.get_by_id(
        db_session, 4)
    module_build.component_builds.sort(key=lambda item: item.id)
    component_build = module_build.component_builds[0]
    component_build.task_id = None
    component_build.nvr = None
    component_build.state = None
    db_session.commit()

    recovered = builder.recover_orphaned_artifact(component_build)
    db_session.commit()

    assert recovered
    # A fake koji "build complete" event must have been queued for the component.
    event_info = events.scheduler.queue[0][3]
    assert event_info == ('recover_orphaned_artifact: fake message', 12345, 1,
                          'rubygem-rails', '1.0', '1.module+2+b8661ee4', 4, None)

    assert component_build.state == koji.BUILD_STATES["COMPLETE"]
    assert component_build.task_id == 12345
    assert component_build.state_reason == "Found existing build"
    # The untagged build must be tagged into the module build tag (id 2).
    builder.koji_session.tagBuild.assert_called_once_with(
        2, "foo-1.0-1.{0}".format(dist_tag))
def test_do_not_handle_a_duplicate_late_init_message(self):
    """A late "init" message for a build already in "wait" is only warned about."""
    build = db_session.query(ModuleBuild).filter(
        ModuleBuild.name == "testmodule").one()
    build.state = BUILD_STATES["wait"]
    db_session.commit()

    with patch.object(module_build_service.scheduler.handlers.modules, "log") as log:
        self.fn("msg-id-123", build.id, BUILD_STATES["init"])
        # The handler bails out with warnings instead of re-processing.
        assert 2 == log.warning.call_count
def process_message(self, event_info):
    """Dispatch one parsed event to its handler (directly or via Celery).

    On handler failure: log, roll back, transition the associated module
    build to "failed", commit, and re-raise so the caller can react.
    """
    # Choose a handler for this message
    handler, build = self._map_message(db_session, event_info)

    if handler is None:
        log.debug("No event handler associated with msg %s", event_info["msg_id"])
        return

    idx = "%s: %s, %s" % (handler.__name__, event_info["event"], event_info["msg_id"])

    if handler is no_op_handler:
        log.debug("Handler is NO_OP: %s", idx)
        return

    if not build:
        log.debug("No module associated with msg %s", event_info["msg_id"])
        return

    # Track which build is being processed for the duration of the handler.
    MBSConsumer.current_module_build_id = build.id

    log.info("Calling %s", idx)

    # The handler receives the event payload minus the "event" key itself.
    kwargs = event_info.copy()
    kwargs.pop("event")

    try:
        if conf.celery_broker_url:
            # handlers are also Celery tasks, when celery_broker_url is configured,
            # call "delay" method to run the handlers as Celery async tasks
            func = getattr(handler, "delay")
            func(**kwargs)
        else:
            handler(**kwargs)
    except Exception as e:
        log.exception("Could not process message handler.")
        db_session.rollback()
        # Refresh so the failure transition applies to the build's current DB row.
        db_session.refresh(build)
        build.transition(
            db_session,
            conf,
            state=models.BUILD_STATES["failed"],
            state_reason=str(e),
            failure_type="infra",
        )
        db_session.commit()

        # Allow caller to do something when error is occurred.
        raise
    finally:
        MBSConsumer.current_module_build_id = None
        log.debug("Done with %s", idx)
def test_add_default_modules_compatible_platforms(mock_get_dm):
    """
    Test that default modules built against compatible base module streams are added.
    """
    clean_database(add_platform_module=False)

    # Create compatible base modules.
    mmd = load_mmd(read_staged_data("platform"))
    for stream in ["f27", "f28"]:
        mmd = mmd.copy("platform", stream)

        # Set the virtual stream to "fedora" to make these base modules compatible.
        xmd = mmd.get_xmd()
        xmd["mbs"]["virtual_streams"] = ["fedora"]
        xmd["mbs"]["use_default_modules"] = True
        mmd.set_xmd(xmd)
        import_mmd(db_session, mmd)

    mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))
    xmd_brs = mmd.get_xmd()["mbs"]["buildrequires"]
    assert set(xmd_brs.keys()) == {"platform"}

    platform_f27 = ModuleBuild.get_build_from_nsvc(
        db_session, "platform", "f27", "3", "00000000")
    assert platform_f27

    # Create python default module which requires platform:f27 and therefore cannot be used
    # as default module for platform:f28.
    dependencies = [
        {"requires": {"platform": ["f27"]}, "buildrequires": {"platform": ["f27"]}}]
    make_module_in_db("python:3:12345:1", base_module=platform_f27, dependencies=dependencies)

    # Create nodejs default module which requries any platform stream and therefore can be used
    # as default module for platform:f28.
    dependencies[0]["requires"]["platform"] = []
    make_module_in_db("nodejs:11:2345:2", base_module=platform_f27, dependencies=dependencies)
    db_session.commit()

    mock_get_dm.return_value = {
        "nodejs": "11",
        "python": "3",
        "ruby": "2.6",
    }
    defaults_added = default_modules.add_default_modules(mmd)

    # Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in
    # the database
    assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform"}
    mock_get_dm.assert_called_once_with(
        "f28",
        "https://pagure.io/releng/fedora-module-defaults.git",
    )
    assert defaults_added is True