def test__get_base_module_mmds_virtual_streams(self, virtual_streams):
    """Ensure the correct results are returned without duplicates."""
    init_data(data_size=1, multiple_stream_versions=True)

    # Rewrite the testmodule dependencies so that it buildrequires
    # platform:f29.2.0 while keeping its original runtime streams.
    mmd = load_mmd(read_staged_data("testmodule_v2"))
    old_deps = mmd.get_dependencies()[0]
    rebuilt_deps = Modulemd.Dependencies()
    for runtime_stream in old_deps.get_runtime_streams("platform"):
        rebuilt_deps.add_runtime_stream("platform", runtime_stream)
    rebuilt_deps.add_buildtime_stream("platform", "f29.2.0")
    mmd.remove_dependencies(old_deps)
    mmd.add_dependencies(rebuilt_deps)

    make_module_in_db("platform:lp29.1.1:12:c11", virtual_streams=virtual_streams)

    mmds = get_base_module_mmds(db_session, mmd)

    # The lp29.1.1 stream is only compatible when it shares the f29
    # virtual stream with the buildrequired platform.
    expected = {"platform:f29.0.0", "platform:f29.1.0", "platform:f29.2.0"}
    if virtual_streams == ["f29"]:
        expected.add("platform:lp29.1.1")

    # Verify no duplicates were returned before doing set operations
    assert len(mmds["ready"]) == len(expected)
    # Verify the expected ones were returned
    actual = {
        "{}:{}".format(ready_mmd.get_module_name(), ready_mmd.get_stream_name())
        for ready_mmd in mmds["ready"]
    }
    assert actual == expected
def test__get_base_module_mmds_virtual_streams_only_major_versions(
        self, cfg):
    """Ensure the correct results are returned without duplicates."""
    init_data(data_size=1, multiple_stream_versions=["foo28", "foo29", "foo30"])

    # Mark platform:foo28 as garbage to test that it is still considered as compatible.
    platform = db_session.query(models.ModuleBuild).filter_by(
        name="platform", stream="foo28").first()
    platform.state = "garbage"
    db_session.add(platform)
    db_session.commit()

    # Rewrite the testmodule dependencies so that it buildrequires
    # platform:foo29 while keeping its original runtime streams.
    mmd = load_mmd(read_staged_data("testmodule_v2"))
    old_deps = mmd.get_dependencies()[0]
    rebuilt_deps = Modulemd.Dependencies()
    for runtime_stream in old_deps.get_runtime_streams("platform"):
        rebuilt_deps.add_runtime_stream("platform", runtime_stream)
    rebuilt_deps.add_buildtime_stream("platform", "foo29")
    mmd.remove_dependencies(old_deps)
    mmd.add_dependencies(rebuilt_deps)

    mmds = get_base_module_mmds(db_session, mmd)
    expected = {
        "ready": {"platform:foo29", "platform:foo30"},
        "garbage": {"platform:foo28"},
    }

    # Verify no duplicates were returned before doing set operations
    assert len(mmds) == len(expected)
    for state in expected:
        assert len(mmds[state]) == len(expected[state])
        # Verify the expected ones were returned
        actual = {
            "{}:{}".format(state_mmd.get_module_name(), state_mmd.get_stream_name())
            for state_mmd in mmds[state]
        }
        assert actual == expected[state]
def get_reusable_module(module):
    """
    Returns previous module build of the module `module` in case it can be used as a source module
    to get the components to reuse from.

    In case there is no such module, returns None.

    :param module: the ModuleBuild object of module being built.
    :return: ModuleBuild object which can be used for component reuse.
    """
    # Fast path: a reusable module was already resolved and stored on a
    # previous call for this build.
    if module.reused_module:
        return module.reused_module

    mmd = module.mmd()
    previous_module_build = None

    # The `base_mmds` will contain the list of base modules against which the possible modules
    # to reuse are built. There are three options how these base modules are found:
    #
    # 1) The `conf.allow_only_compatible_base_modules` is False. This means that MBS should
    #    not try to find any compatible base modules in its DB and simply use the buildrequired
    #    base module as it is.
    # 2) The `conf.allow_only_compatible_base_modules` is True and DBResolver is used. This means
    #    that MBS should try to find the compatible modules using its database.
    #    The `get_base_module_mmds` finds out the list of compatible modules and returns mmds of
    #    all of them.
    # 3) The `conf.allow_only_compatible_base_modules` is True and KojiResolver is used. This
    #    means that MBS should *not* try to find any compatible base modules in its DB, but
    #    instead just query Koji using KojiResolver later to find out the module to
    #    reuse. The list of compatible base modules is defined by Koji tag inheritance directly
    #    in Koji.
    #    The `get_base_module_mmds` in this case returns just the buildrequired base module.
    if conf.allow_only_compatible_base_modules:
        log.debug("Checking for compatible base modules")
        base_mmds = get_base_module_mmds(db_session, mmd)["ready"]
        # Sort the base_mmds based on the stream version, higher version first.
        base_mmds.sort(
            key=lambda mmd: models.ModuleBuild.get_stream_version(
                mmd.get_stream_name(), False),
            reverse=True)
    else:
        log.debug("Skipping the check for compatible base modules")
        # Fall back to the buildrequired base module(s) as-is.
        base_mmds = []
        for br in module.buildrequires:
            if br.name in conf.base_module_names:
                base_mmds.append(br.mmd())

    # Try each candidate base module in order (highest stream version first
    # when sorted above) and stop at the first base module that yields a
    # reusable previous build.
    for base_mmd in base_mmds:
        # Base query: ready builds of the same name:stream that have a SCM
        # URL, newest completed first. It is narrowed further below before
        # being executed.
        previous_module_build = (
            db_session.query(models.ModuleBuild)
            .filter_by(name=mmd.get_module_name())
            .filter_by(stream=mmd.get_stream_name())
            .filter_by(state=models.BUILD_STATES["ready"])
            .filter(models.ModuleBuild.scmurl.isnot(None))
            .order_by(models.ModuleBuild.time_completed.desc()))

        # KojiResolver is considered enabled for this base module when its
        # xmd carries the "koji_tag_with_modules" key (truthy value).
        koji_resolver_enabled = base_mmd.get_xmd().get(
            "mbs", {}).get("koji_tag_with_modules")
        if koji_resolver_enabled:
            # Find ModuleBuilds tagged in the Koji tag using KojiResolver.
            resolver = GenericResolver.create(db_session, conf, backend="koji")
            possible_modules_to_reuse = resolver.get_buildrequired_modules(
                module.name, module.stream, base_mmd)

            # Limit the query to these modules.
            possible_module_ids = [m.id for m in possible_modules_to_reuse]
            previous_module_build = previous_module_build.filter(
                models.ModuleBuild.id.in_(possible_module_ids))

            # Limit the query to modules sharing the same `build_context_no_bms`. That means they
            # have the same buildrequirements.
            previous_module_build = previous_module_build.filter_by(
                build_context_no_bms=module.build_context_no_bms)
        else:
            # Recompute the build_context with compatible base module stream.
            mbs_xmd = mmd.get_xmd()["mbs"]
            if base_mmd.get_module_name() not in mbs_xmd["buildrequires"]:
                # This base module is not among the buildrequires at all, so
                # no build_context can be computed for it - try the next one.
                previous_module_build = None
                continue
            # Substitute the compatible base module stream before hashing the
            # buildrequires into a build_context.
            mbs_xmd["buildrequires"][base_mmd.get_module_name()]["stream"] \
                = base_mmd.get_stream_name()
            build_context = module.calculate_build_context(
                mbs_xmd["buildrequires"])

            # Limit the query to find only modules sharing the same build_context.
            previous_module_build = previous_module_build.filter_by(
                build_context=build_context)

        # If we are rebuilding with the "changed-and-after" option, then we can't reuse
        # components from modules that were built more liberally
        if module.rebuild_strategy == "changed-and-after":
            previous_module_build = previous_module_build.filter(
                models.ModuleBuild.rebuild_strategy.in_(
                    ["all", "changed-and-after"]))

        # Execute the query; take the newest matching build, if any.
        previous_module_build = previous_module_build.first()

        if previous_module_build:
            break

    # The component can't be reused if there isn't a previous build in the done
    # or ready state
    if not previous_module_build:
        log.info("Cannot re-use. %r is the first module build." % module)
        return None

    # Cache the result on the build row so the fast path above hits next time.
    module.reused_module_id = previous_module_build.id
    db_session.commit()

    return previous_module_build
def get_mmds_required_by_module_recursively(
        db_session, mmd, default_streams=None, raise_if_stream_ambigous=False):
    """
    Returns the list of Module metadata objects of all modules required while
    building the module defined by `mmd` module metadata.

    This presumes the module metadata streams are expanded using
    `expand_mse_streams(...)` method.

    This method finds out latest versions of all the build-requires of
    the `mmd` module and then also all contexts of these latest versions.
    For each build-required name:stream:version:context module, it checks
    recursively all the "requires" and finds the latest version of each
    required module and also all contexts of these latest versions.

    :param db_session: SQLAlchemy database session.
    :param mmd: Modulemd metadata of the module whose requirements are resolved.
    :param dict default_streams: Dict in {module_name: module_stream, ...} format defining
        the default stream to choose for module in case when there are multiple streams to
        choose from.
    :param bool raise_if_stream_ambigous: When True, raises a StreamAmbigous exception in case
        there are multiple streams for some dependency of module and the module name is not
        defined in `default_streams`, so it is not clear which stream should be used.
    :rtype: list of Modulemd metadata
    :return: List of all modulemd metadata of all modules required to build the module `mmd`.
    """
    # We use dict with name:stream as a key and list with mmds as value.
    # That way, we can ensure we won't have any duplicate mmds in a resulting
    # list and we also don't waste resources on getting the modules we already
    # handled from DB.
    mmds = {}

    # Get the MMDs of all compatible base modules based on the buildrequires.
    base_module_mmds = get_base_module_mmds(db_session, mmd)
    if not base_module_mmds["ready"]:
        base_module_choices = " or ".join(conf.base_module_names)
        raise UnprocessableEntity(
            "None of the base module ({}) streams in the buildrequires section could be found"
            .format(base_module_choices))

    # Add base modules to `mmds`.
    for base_module in base_module_mmds["ready"]:
        ns = ":".join(
            [base_module.get_module_name(), base_module.get_stream_name()])
        mmds.setdefault(ns, [])
        mmds[ns].append(base_module)

    # The currently submitted module build must be built only against "ready" base modules,
    # but its dependencies might have been built against some old platform which is already
    # EOL ("garbage" state). In order to find such old module builds, we need to include
    # also EOL platform streams.
    all_base_module_mmds = base_module_mmds["ready"] + base_module_mmds[
        "garbage"]

    # Get all the buildrequires of the module of interest.
    for deps in mmd.get_dependencies():
        deps_dict = deps_to_dict(deps, 'buildtime')
        mmds = _get_mmds_from_requires(
            db_session, deps_dict, mmds, False, default_streams,
            raise_if_stream_ambigous, all_base_module_mmds)

    # Now get the requires of buildrequires recursively.
    # Note: iterate over a snapshot of the keys because _get_mmds_from_requires
    # adds new entries to `mmds` while we walk it. The loop variable is named
    # `required_mmd` (not `mmd`) to avoid shadowing the `mmd` parameter.
    for mmd_key in list(mmds.keys()):
        for required_mmd in mmds[mmd_key]:
            for deps in required_mmd.get_dependencies():
                deps_dict = deps_to_dict(deps, 'runtime')
                mmds = _get_mmds_from_requires(
                    db_session, deps_dict, mmds, True, default_streams,
                    raise_if_stream_ambigous, all_base_module_mmds)

    # Make single list from dict of lists.
    res = []
    for ns, mmds_list in mmds.items():
        if len(mmds_list) == 0:
            raise UnprocessableEntity("Cannot find any module builds for %s" % (ns))
        res += mmds_list
    return res