def test_get_rpm_release_metadata_br_stream_override(mock_admmn):
    """
    Test that when a module buildrequires a module in conf.allowed_privileged_module_names,
    and that module has the xmd.mbs.disttag_marking field set, it should influence the disttag.
    """
    scheduler_init_data(1)
    # Import the privileged module whose metadata carries the disttag marking.
    metadata_mmd = load_mmd(read_staged_data("build_metadata_module"))
    import_mmd(db_session, metadata_mmd)
    build_one = models.ModuleBuild.get_by_id(db_session, 2)
    mmd = build_one.mmd()
    # Make build 2 buildrequire the "build" module: add the buildtime stream to
    # the dependencies and record the resolved buildrequire entry in xmd.
    deps = mmd.get_dependencies()[0]
    deps.add_buildtime_stream("build", "product1.2")
    xmd = mmd.get_xmd()
    xmd["mbs"]["buildrequires"]["build"] = {
        "filtered_rpms": [],
        "ref": "virtual",
        "stream": "product1.2",
        "version": "1",
        "context": "00000000",
    }
    mmd.set_xmd(xmd)
    # Persist the modified modulemd so get_rpm_release sees the new buildrequire.
    build_one.modulemd = mmd_to_str(mmd)
    db_session.add(build_one)
    db_session.commit()
    release = utils.get_rpm_release(db_session, build_one)
    # "product12" is the buildrequired stream with the dot removed — presumably
    # derived from the privileged module's disttag_marking; confirm against
    # utils.get_rpm_release if this expectation ever changes.
    assert release == "module+product12+2+814cfa39"
def clean_database(add_platform_module=True, add_default_arches=True): """Initialize the test database This function is responsible for dropping all the data in the database and recreating all the tables from scratch. Please note that, this function relies on database objects managed by Flask-SQLAlchemy. """ # Helpful for writing tests if any changes were made using the database # session but the test didn't commit or rollback. # # clean_database is usually called before a test run. So, it makes no sense # to keep any changes in the transaction made by previous test. db_session.remove() db_session.configure(bind=db.session.get_bind()) db.drop_all() db.create_all() if add_default_arches: arch_obj = module_build_service.common.models.ModuleArch(name="x86_64") db.session.add(arch_obj) db.session.commit() if add_platform_module: mmd = load_mmd(read_staged_data("platform")) import_mmd(db.session, mmd)
def test_import_mmd_dont_remove_dropped_virtual_streams_associated_with_other_modules( ): mmd = load_mmd(read_staged_data("formatted_testmodule")) # Add some virtual streams to this module metadata xmd = mmd.get_xmd() xmd["mbs"]["virtual_streams"] = ["f28", "f29", "f30"] mmd.set_xmd(xmd) import_mmd(db_session, mmd) # Import another module which has overlapping virtual streams another_mmd = load_mmd( read_staged_data("formatted_testmodule-more-components")) # Add some virtual streams to this module metadata xmd = another_mmd.get_xmd() xmd["mbs"]["virtual_streams"] = ["f29", "f30"] another_mmd.set_xmd(xmd) another_module_build, _ = import_mmd(db_session, another_mmd) # Now, remove f30 from mmd xmd = mmd.get_xmd() xmd["mbs"]["virtual_streams"] = ["f28", "f29"] mmd.set_xmd(xmd) # Reimport formatted_testmodule again module_build, _ = import_mmd(db_session, mmd) db_session.refresh(module_build) assert ["f28", "f29"] == sorted(item.name for item in module_build.virtual_streams) # The overlapped f30 should be still there. db_session.refresh(another_module_build) assert ["f29", "f30"] == sorted(item.name for item in another_module_build.virtual_streams)
def test_get_buildrequired_modulemds(self):
    """The DB resolver returns modules that buildrequire the given platform stream."""
    # Create a platform:f30.1.3 base module for the build to buildrequire.
    mmd = load_mmd(tests.read_staged_data("platform"))
    mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
    import_mmd(db_session, mmd)
    platform_f300103 = db_session.query(ModuleBuild).filter_by(stream="f30.1.3").one()
    mmd = tests.make_module("testmodule:master:20170109091357:123")
    # state=5 is a terminal/ready-like state — TODO confirm against
    # models.BUILD_STATES; the resolver only considers such builds.
    build = ModuleBuild(
        name="testmodule",
        stream="master",
        version=20170109091357,
        state=5,
        build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",
        runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",
        context="7c29193d",
        koji_tag="module-testmodule-master-20170109091357-7c29193d",
        scmurl="https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
        batch=3,
        owner="Dr. Pepper",
        time_submitted=datetime(2018, 11, 15, 16, 8, 18),
        time_modified=datetime(2018, 11, 15, 16, 19, 35),
        rebuild_strategy="changed-and-after",
        modulemd=mmd_to_str(mmd),
    )
    build.buildrequires.append(platform_f300103)
    db_session.add(build)
    db_session.commit()

    resolver = mbs_resolver.GenericResolver.create(db_session, conf, backend="db")
    result = resolver.get_buildrequired_modulemds(
        "testmodule", "master", platform_f300103.mmd())
    nsvcs = {m.get_nsvc() for m in result}
    assert nsvcs == {"testmodule:master:20170109091357:123"}
def test_import_mmd_multiple_dependencies():
    """Importing a modulemd with more than one dependencies entry is rejected."""
    mmd = load_mmd(read_staged_data("formatted_testmodule"))
    # Duplicate the single dependencies entry so the list has two elements.
    mmd.add_dependencies(mmd.get_dependencies()[0].copy())

    with pytest.raises(UnprocessableEntity) as excinfo:
        import_mmd(db_session, mmd)
    assert str(excinfo.value) == (
        "The imported module's dependencies list should contain just one element"
    )
def require_platform_and_default_arch_cls(require_empty_database_cls):
    """Provides clean database with platform module and default arch"""
    # Register the default x86_64 arch, then import the staged platform module.
    db_session.add(module_build_service.common.models.ModuleArch(name="x86_64"))
    db_session.commit()
    import_mmd(db_session, load_mmd(read_staged_data("platform")))
def test_add_default_modules_compatible_platforms(mock_get_dm):
    """
    Test that default modules built against compatible base module streams are added.
    """
    clean_database(add_platform_module=False)

    # Create compatible base modules.
    mmd = load_mmd(read_staged_data("platform"))
    for stream in ["f27", "f28"]:
        mmd = mmd.copy("platform", stream)
        # Set the virtual stream to "fedora" to make these base modules compatible.
        xmd = mmd.get_xmd()
        xmd["mbs"]["virtual_streams"] = ["fedora"]
        xmd["mbs"]["use_default_modules"] = True
        mmd.set_xmd(xmd)
        import_mmd(db_session, mmd)
    # NOTE(review): other call sites pass "formatted_testmodule" without the
    # ".yaml" suffix — confirm read_staged_data accepts both forms.
    mmd = load_mmd(read_staged_data("formatted_testmodule.yaml"))
    xmd_brs = mmd.get_xmd()["mbs"]["buildrequires"]
    assert set(xmd_brs.keys()) == {"platform"}

    platform_f27 = ModuleBuild.get_build_from_nsvc(
        db_session, "platform", "f27", "3", "00000000")
    assert platform_f27
    # Create python default module which requires platform:f27 and therefore cannot be used
    # as default module for platform:f28.
    dependencies = [
        {"requires": {"platform": ["f27"]}, "buildrequires": {"platform": ["f27"]}}]
    make_module_in_db("python:3:12345:1", base_module=platform_f27, dependencies=dependencies)
    # Create nodejs default module which requries any platform stream and therefore can be used
    # as default module for platform:f28.
    dependencies[0]["requires"]["platform"] = []
    make_module_in_db("nodejs:11:2345:2", base_module=platform_f27, dependencies=dependencies)
    db_session.commit()

    mock_get_dm.return_value = {
        "nodejs": "11",
        "python": "3",
        "ruby": "2.6",
    }
    defaults_added = default_modules.add_default_modules(mmd)
    # Make sure that the default modules were added. ruby:2.6 will be ignored since it's not in
    # the database
    assert set(mmd.get_xmd()["mbs"]["buildrequires"].keys()) == {"nodejs", "platform"}
    mock_get_dm.assert_called_once_with(
        "f28",
        "https://pagure.io/releng/fedora-module-defaults.git",
    )
    assert defaults_added is True
def test_import_mmd_no_xmd_buildrequires():
    """Import must fail when xmd["mbs"]["buildrequires"] is missing."""
    mmd = load_mmd(read_staged_data("formatted_testmodule"))
    # Strip the buildrequires bookkeeping that import_mmd expects.
    xmd = mmd.get_xmd()
    xmd["mbs"].pop("buildrequires")
    mmd.set_xmd(xmd)

    with pytest.raises(UnprocessableEntity) as excinfo:
        import_mmd(db_session, mmd)
    assert str(excinfo.value) == (
        "The imported module buildrequires other modules, but the metadata in the "
        'xmd["mbs"]["buildrequires"] dictionary is missing entries'
    )
def import_builds_from_local_dnf_repos(platform_id=None):
    """
    Imports the module builds from all available local repositories to MBS DB.

    This is used when building modules locally without any access to MBS infra.
    This method also generates and imports the base module according to /etc/os-release.

    :param str platform_id: The `name:stream` of a fake platform module to generate in this
        method. When not set, the /etc/os-release is parsed to get the PLATFORM_ID.
    :raises ValueError: when no platform_id is given and none can be read from
        /etc/os-release.
    """
    log.info("Loading available RPM repositories.")
    dnf_base = dnf.Base()
    dnf_base.read_all_repos()

    log.info("Importing available modules to MBS local database.")
    for repo in dnf_base.repos.values():
        try:
            repo.load()
        except Exception as e:
            # Best-effort: a broken or unreachable repo must not abort the
            # whole local import.
            log.warning(str(e))
            continue
        mmd_data = repo.get_metadata_content("modules")
        mmd_index = Modulemd.ModuleIndex.new()
        ret, _ = mmd_index.update_from_string(mmd_data, True)
        if not ret:
            log.warning("Loading the repo '%s' failed", repo.name)
            continue

        for module_name in mmd_index.get_module_names():
            for mmd in mmd_index.get_module(module_name).get_all_streams():
                # Mark the module as coming from a local repofile so the local
                # builder resolves its repository from the koji_tag URI.
                xmd = mmd.get_xmd()
                xmd["mbs"] = {
                    "koji_tag": "repofile://" + repo.repofile,
                    "mse": True,
                    "commit": "unknown",
                }
                mmd.set_xmd(xmd)
                import_mmd(db_session, mmd, False)

    if not platform_id:
        # Parse the /etc/os-release to find out the local platform:stream.
        with open("/etc/os-release", "r") as fd:
            # Iterate the file lazily instead of readlines(); also avoids the
            # ambiguous single-letter name "l".
            for line in fd:
                if not line.startswith("PLATFORM_ID"):
                    continue
                platform_id = line.split("=")[1].strip("\"' \n")
    if not platform_id:
        raise ValueError("Cannot get PLATFORM_ID from /etc/os-release.")

    # Create the fake platform:stream:1:000000 module to fulfill the
    # dependencies for local offline build and also to define the
    # srpm-buildroot and buildroot.
    import_fake_base_module("%s:1:000000" % platform_id)
def test_import_mmd_base_module(stream, disttag_marking, error_msg):
    """Base-module import validates the stream and disttag_marking values."""
    clean_database(add_platform_module=False)
    base = load_mmd(read_staged_data("platform"))
    mmd = base.copy(base.get_module_name(), stream)

    if disttag_marking:
        xmd = mmd.get_xmd()
        xmd["mbs"]["disttag_marking"] = disttag_marking
        mmd.set_xmd(xmd)

    if not error_msg:
        import_mmd(db_session, mmd)
    else:
        with pytest.raises(UnprocessableEntity, match=error_msg):
            import_mmd(db_session, mmd)
def post(self, api_version):
    """Handle the module-import API request.

    Fetches the modulemd from the SCM URL in the request payload, imports it
    into the database, and returns the created build as JSON.

    :param api_version: API version from the URL route; not used directly here.
    :return: (JSON response, 201) on success.
    :raises Forbidden: when the import API is disabled or the user is not in
        an allowed group (via check_groups).
    """
    # disable this API endpoint if no groups are defined
    if not conf.allowed_groups_to_import_module:
        log.error(
            "Import module API is disabled. Set 'ALLOWED_GROUPS_TO_IMPORT_MODULE'"
            " configuration value first.")
        raise Forbidden("Import module API is disabled.")

    # auth checks
    username, groups = module_build_service.web.auth.get_user(request)
    ModuleBuildAPI.check_groups(
        username, groups, allowed_groups=conf.allowed_groups_to_import_module)

    # process request using SCM handler
    handler = SCMHandler(request)
    handler.validate(skip_branch=True, skip_optional_params=True)
    mmd, _ = fetch_mmd(handler.data["scmurl"], mandatory_checks=False)
    build, messages = import_mmd(db.session, mmd)
    json_data = {
        "module": build.json(db.session, show_tasks=False),
        "messages": messages
    }

    # return 201 Created if we reach this point
    return jsonify(json_data), 201
def _create_test_modules(self, koji_tag_with_modules="foo-test"):
    """Create a platform:f30.1.3 base module and two testmodule builds.

    :param str koji_tag_with_modules: when truthy, set this value as
        xmd["mbs"]["koji_tag_with_modules"] on the platform module (enables
        the KojiResolver code path); pass a falsy value to skip it.
    """
    mmd = load_mmd(tests.read_staged_data("platform"))
    mmd = mmd.copy(mmd.get_module_name(), "f30.1.3")
    import_mmd(db_session, mmd)
    platform = db_session.query(ModuleBuild).filter_by(
        stream="f30.1.3").one()

    if koji_tag_with_modules:
        platform = db_session.query(ModuleBuild).filter_by(
            stream="f30.1.3").one()
        platform_mmd = platform.mmd()
        platform_xmd = platform_mmd.get_xmd()
        platform_xmd["mbs"][
            "koji_tag_with_modules"] = koji_tag_with_modules
        platform_mmd.set_xmd(platform_xmd)
        # Persisted by the commit at the end of this method.
        platform.modulemd = mmd_to_str(platform_mmd)

    # Two builds of the same NSV differing only in context.
    for context in ["7c29193d", "7c29193e"]:
        mmd = tests.make_module("testmodule:master:20170109091357:" + context)
        # state=5 — presumably the "ready" state; confirm against
        # models.BUILD_STATES.
        build = ModuleBuild(
            name="testmodule",
            stream="master",
            version=20170109091357,
            state=5,
            build_context="dd4de1c346dcf09ce77d38cd4e75094ec1c08ec3",
            runtime_context="ec4de1c346dcf09ce77d38cd4e75094ec1c08ef7",
            context=context,
            koji_tag="module-testmodule-master-20170109091357-" + context,
            scmurl=
            "https://src.stg.fedoraproject.org/modules/testmodule.git?#ff1ea79",
            batch=3,
            owner="Dr. Pepper",
            time_submitted=datetime(2018, 11, 15, 16, 8, 18),
            time_modified=datetime(2018, 11, 15, 16, 19, 35),
            rebuild_strategy="changed-and-after",
            modulemd=mmd_to_str(mmd),
        )
        build.buildrequires.append(platform)
        db_session.add(build)
    db_session.commit()
def test_import_mmd_minimal_xmd_from_local_repository():
    """A module carrying only the minimal local-repository xmd can be imported."""
    mmd = load_mmd(read_staged_data("formatted_testmodule"))
    # Replace xmd["mbs"] wholesale with the minimal keys a local repo provides.
    xmd = mmd.get_xmd()
    xmd["mbs"] = {
        "koji_tag": "repofile:///etc/yum.repos.d/fedora-modular.repo",
        "mse": True,
        "commit": "unknown",
    }
    mmd.set_xmd(xmd)

    module_build, _ = import_mmd(db_session, mmd, False)
    assert module_build.name == mmd.get_module_name()
def init_data(data_size=10, contexts=False, multiple_stream_versions=None, scratch=False):
    """
    Creates data_size * 3 modules in database in different states and
    with different component builds. See _populate_data for more info.

    :param bool contexts: If True, multiple streams and contexts in each stream
        are generated for 'nginx' module.
    :param list/bool multiple_stream_versions: If true, multiple base modules with
        difference stream versions are generated. If set to list, the list defines
        the generated base module streams.
    :param bool scratch: passed through to _populate_data.

    (!) This method is not responsible for cleaning the database, use appropriate fixture.
    """
    if multiple_stream_versions:
        if multiple_stream_versions is True:
            # Default set of base module streams when the caller asked for
            # "multiple" without specifying which.
            multiple_stream_versions = [
                "f28.0.0", "f29.0.0", "f29.1.0", "f29.2.0"
            ]
        mmd = load_mmd(read_staged_data("platform"))
        for stream in multiple_stream_versions:
            mmd = mmd.copy("platform", stream)

            # Set the virtual_streams based on "fXY" to mark the platform streams
            # with the same major stream_version compatible.
            xmd = mmd.get_xmd()
            xmd["mbs"]["virtual_streams"] = [stream[:3]]
            mmd.set_xmd(xmd)
            import_mmd(db.session, mmd)

            # Just to possibly confuse tests by adding another base module.
            mmd = mmd.copy("bootstrap", stream)
            import_mmd(db.session, mmd)
    _populate_data(data_size, contexts=contexts, scratch=scratch)
def import_fake_base_module(nsvc):
    """
    Creates and imports new fake base module to be used with offline local builds.

    :param str nsvc: name:stream:version:context of a module.
    """
    name, stream, version, context = nsvc.split(":")
    mmd = Modulemd.ModuleStreamV2.new(name, stream)
    mmd.set_version(int(version))
    mmd.set_context(context)
    mmd.set_summary("fake base module")
    mmd.set_description("fake base module")
    mmd.add_module_license("GPL")

    # Attach the two standard profiles, filled from the configured defaults.
    for profile_name, packages in (
        ("buildroot", conf.default_buildroot_packages),
        ("srpm-buildroot", conf.default_srpm_buildroot_packages),
    ):
        profile = Modulemd.Profile.new(profile_name)
        for rpm in packages:
            profile.add_rpm(rpm)
        mmd.add_profile(profile)

    mmd.set_xmd({
        "mbs": {
            "buildrequires": {},
            "requires": {},
            "commit": "ref_%s" % context,
            "mse": "true",
            # Use empty "repofile://" URI for base module. The base module will
            # use the `conf.base_module_names` list as list of default
            # repositories.
            "koji_tag": "repofile://",
        }
    })
    import_mmd(db_session, mmd, False)
def test_import_mmd_remove_dropped_virtual_streams():
    """Reimporting a module with fewer virtual streams drops the removed ones."""
    mmd = load_mmd(read_staged_data("formatted_testmodule"))

    def _import_with_virtual_streams(streams):
        # Set the virtual streams in xmd and (re)import the module.
        xmd = mmd.get_xmd()
        xmd["mbs"]["virtual_streams"] = streams
        mmd.set_xmd(xmd)
        return import_mmd(db_session, mmd)

    # First import carries three virtual streams.
    _import_with_virtual_streams(["f28", "f29", "f30"])

    # Reimport with f30 dropped; it should be removed from the database.
    module_build, _ = _import_with_virtual_streams(["f28", "f29"])

    db_session.refresh(module_build)
    assert ["f28", "f29"] == sorted(vs.name for vs in module_build.virtual_streams)
    assert 0 == db_session.query(models.VirtualStream).filter_by(name="f30").count()
def test_import_mmd_contexts(context):
    """An explicit context is kept on import; otherwise the default is used."""
    mmd = load_mmd(read_staged_data("formatted_testmodule"))
    mmd.set_context(context)

    xmd = mmd.get_xmd()
    xmd["mbs"]["koji_tag"] = "foo"
    mmd.set_xmd(xmd)

    build, _ = import_mmd(db_session, mmd)

    expected = context if context else models.DEFAULT_MODULE_CONTEXT
    assert build.mmd().get_context() == expected
    assert build.context == expected
def test_get_reusable_module_koji_resolver(
        self, resolver, ClientSession, cfg, allow_ocbm):
    """
    Test that get_reusable_module works with KojiResolver.
    """
    cfg.return_value = allow_ocbm

    # Mock the listTagged so the testmodule:master is listed as tagged in the
    # module-fedora-27-build Koji tag.
    koji_session = ClientSession.return_value
    koji_session.listTagged.return_value = [{
        "build_id": 123,
        "name": "testmodule",
        "version": "master",
        "release": "20170109091357.78e4a6fd",
        "tag_name": "module-fedora-27-build"
    }]
    koji_session.multiCall.return_value = [
        [build] for build in koji_session.listTagged.return_value
    ]

    # Mark platform:f28 as KojiResolver ready by defining "koji_tag_with_modules".
    # Also define the "virtual_streams" to possibly confuse the get_reusable_module.
    platform_f28 = db_session.query(
        models.ModuleBuild).filter_by(name="platform").one()
    mmd = platform_f28.mmd()
    xmd = mmd.get_xmd()
    xmd["mbs"]["virtual_streams"] = ["fedora"]
    xmd["mbs"]["koji_tag_with_modules"] = "module-fedora-27-build"
    mmd.set_xmd(xmd)
    platform_f28.modulemd = mmd_to_str(mmd)
    platform_f28.update_virtual_streams(db_session, ["fedora"])

    # Create platform:f27 without KojiResolver support.
    mmd = load_mmd(read_staged_data("platform"))
    mmd = mmd.copy("platform", "f27")
    xmd = mmd.get_xmd()
    xmd["mbs"]["virtual_streams"] = ["fedora"]
    mmd.set_xmd(xmd)
    platform_f27 = import_mmd(db_session, mmd)[0]

    # Change the reusable testmodule:master to buildrequire platform:f27.
    latest_module = db_session.query(models.ModuleBuild).filter_by(
        name="testmodule", state=models.BUILD_STATES["ready"]).one()
    mmd = latest_module.mmd()
    xmd = mmd.get_xmd()
    xmd["mbs"]["buildrequires"]["platform"]["stream"] = "f27"
    mmd.set_xmd(xmd)
    latest_module.modulemd = mmd_to_str(mmd)
    latest_module.buildrequires = [platform_f27]

    # Recompute the build_context and ensure that `build_context` changed while
    # `build_context_no_bms` did not change.
    contexts = models.ModuleBuild.contexts_from_mmd(latest_module.modulemd)
    assert latest_module.build_context_no_bms == contexts.build_context_no_bms
    assert latest_module.build_context != contexts.build_context
    latest_module.build_context = contexts.build_context
    latest_module.build_context_no_bms = contexts.build_context_no_bms
    db_session.commit()

    # Get the module we want to build.
    module = db_session.query(models.ModuleBuild)\
        .filter_by(name="testmodule")\
        .filter_by(state=models.BUILD_STATES["build"])\
        .one()

    reusable_module = get_reusable_module(module)
    # With KojiResolver, the module tagged in "koji_tag_with_modules" is reusable.
    assert reusable_module.id == latest_module.id
def test_get_reusable_module_use_latest_build(self, cfg, allow_ocbm):
    """
    Test that the `get_reusable_module` tries to reuse the latest module in case when
    multiple modules can be reused allow_only_compatible_base_modules is True.
    """
    cfg.return_value = allow_ocbm
    # Set "fedora" virtual stream to platform:f28.
    platform_f28 = db_session.query(
        models.ModuleBuild).filter_by(name="platform").one()
    mmd = platform_f28.mmd()
    xmd = mmd.get_xmd()
    xmd["mbs"]["virtual_streams"] = ["fedora"]
    mmd.set_xmd(xmd)
    platform_f28.modulemd = mmd_to_str(mmd)
    platform_f28.update_virtual_streams(db_session, ["fedora"])

    # Create platform:f29 with "fedora" virtual stream.
    mmd = load_mmd(read_staged_data("platform"))
    mmd = mmd.copy("platform", "f29")
    xmd = mmd.get_xmd()
    xmd["mbs"]["virtual_streams"] = ["fedora"]
    mmd.set_xmd(xmd)
    platform_f29 = import_mmd(db_session, mmd)[0]

    # Create another copy of `testmodule:master` which should be reused, because its
    # stream version will be higher than the previous one. Also set its buildrequires
    # to platform:f29.
    latest_module = db_session.query(models.ModuleBuild).filter_by(
        name="testmodule", state=models.BUILD_STATES["ready"]).one()
    # This is used to clone the ModuleBuild SQLAlchemy object without recreating it from
    # scratch.
    db_session.expunge(latest_module)
    make_transient(latest_module)

    # Change the platform:f28 buildrequirement to platform:f29 and recompute the build_context.
    mmd = latest_module.mmd()
    xmd = mmd.get_xmd()
    xmd["mbs"]["buildrequires"]["platform"]["stream"] = "f29"
    mmd.set_xmd(xmd)
    latest_module.modulemd = mmd_to_str(mmd)
    contexts = models.ModuleBuild.contexts_from_mmd(latest_module.modulemd)
    latest_module.build_context = contexts.build_context
    latest_module.context = contexts.context
    latest_module.buildrequires = [platform_f29]

    # Set the `id` to None, so new one is generated by SQLAlchemy.
    latest_module.id = None
    db_session.add(latest_module)
    db_session.commit()

    module = db_session.query(models.ModuleBuild)\
        .filter_by(name="testmodule")\
        .filter_by(state=models.BUILD_STATES["build"])\
        .one()
    db_session.commit()

    reusable_module = get_reusable_module(module)

    if allow_ocbm:
        # Compatible base modules allowed: the newer clone wins.
        assert reusable_module.id == latest_module.id
    else:
        # There are two testmodules in ready state, the first one with
        # lower id is what we want.
        first_module = db_session.query(models.ModuleBuild).filter_by(
            name="testmodule", state=models.BUILD_STATES["ready"]).order_by(
                models.ModuleBuild.id).first()

        assert reusable_module.id == first_module.id
def require_platform_and_default_arch(require_default_arch):
    """Provides clean database with platform module and a default arch"""
    # The arch comes from the require_default_arch fixture; just add platform.
    import_mmd(db_session, load_mmd(read_staged_data("platform")))
def resolve_requires(self, requires):
    """
    Resolves the requires list of N:S or N:S:V:C to a dictionary with keys as
    the module name and the values as a dictionary with keys of ref,
    stream, version.

    If there are some modules loaded by load_local_builds(...), these
    local modules will be considered when resolving the requires.

    A RuntimeError is raised on MBS lookup errors.

    :param requires: a list of N:S or N:S:V:C strings
    :return: a dictionary
    :raises ValueError: when an entry is neither N:S nor N:S:V:C.
    :raises RuntimeError: when a resolved module lacks a commit hash or version.
    """
    new_requires = {}
    for nsvc in requires:
        nsvc_splitted = nsvc.split(":")
        if len(nsvc_splitted) == 2:
            module_name, module_stream = nsvc_splitted
            module_version = None
            module_context = None
        elif len(nsvc_splitted) == 4:
            module_name, module_stream, module_version, module_context = nsvc_splitted
        else:
            raise ValueError(
                "Only N:S or N:S:V:C is accepted by resolve_requires, got %s" % nsvc)

        # Try to find out module dependency in the local module builds
        # added by load_local_builds(...).
        local_modules = models.ModuleBuild.local_modules(
            self.db_session, module_name, module_stream)
        if local_modules:
            local_build = local_modules[0]
            new_requires[module_name] = {
                # The commit ID isn't currently saved in modules.yaml
                "ref": None,
                "stream": local_build.stream,
                "version": local_build.version,
                "context": local_build.context,
                "koji_tag": local_build.koji_tag,
                # No need to set filtered_rpms for local builds, because MBS
                # filters the RPMs automatically when the module build is
                # done.
                "filtered_rpms": [],
            }
            continue

        commit_hash = None
        version = None
        filtered_rpms = []
        module = self.get_module(
            module_name, module_stream, module_version, module_context, strict=True)
        if module.get("modulemd"):
            mmd = load_mmd(module["modulemd"])
            if mmd.get_xmd().get("mbs", {}).get("commit"):
                commit_hash = mmd.get_xmd()["mbs"]["commit"]

            # Find out the particular NVR of filtered packages
            if "rpms" in module and mmd.get_rpm_filters():
                for rpm in module["rpms"]:
                    nvr = kobo.rpmlib.parse_nvra(rpm)
                    # If the package is not filtered, continue
                    # (idiom fix: "x not in y" instead of "not x in y")
                    if nvr["name"] not in mmd.get_rpm_filters():
                        continue

                    # If the nvr is already in filtered_rpms, continue
                    nvr = kobo.rpmlib.make_nvr(nvr, force_epoch=True)
                    if nvr in filtered_rpms:
                        continue
                    filtered_rpms.append(nvr)

        if module.get("version"):
            version = module["version"]

        if version and commit_hash:
            new_requires[module_name] = {
                "ref": commit_hash,
                "stream": module_stream,
                "version": str(version),
                "context": module["context"],
                "koji_tag": module["koji_tag"],
                "filtered_rpms": filtered_rpms,
            }
        else:
            raise RuntimeError(
                'The module "{0}" didn\'t contain either a commit hash or a'
                " version in MBS".format(module_name))

        # If the module is a base module, then import it in the database so that entries in
        # the module_builds_to_module_buildrequires table can be created later on
        if module_name in conf.base_module_names:
            import_mmd(self.db_session, mmd)

    return new_requires
def import_module(mmd_file):
    """Import the module described by the given modulemd file into the DB."""
    import_mmd(db.session, load_mmd_file(mmd_file))