def tweak(easyconfigs, build_specs):
    """
    Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs (dicts with 'ec', 'spec', ... keys)
    :param build_specs: dict of build specifications to apply (toolchain, version, ...)
    :return: list of tweaked (re-processed) easyconfigs
    """
    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        # raise rather than only logging an error, so we never continue with mixed toolchains;
        # this matches the error handling style used elsewhere in this module
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)

    # obtain full dependency graph for specified easyconfigs
    # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
    orig_ecs = resolve_dependencies(easyconfigs, retain_all_deps=True)

    # determine toolchain based on last easyconfigs
    toolchain = orig_ecs[-1]['ec']['toolchain']
    _log.debug("Filtering using toolchain %s" % toolchain)

    # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
    if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
        while orig_ecs[0]['ec']['toolchain'] != toolchain:
            orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    easyconfigs = []
    for orig_ec in orig_ecs:
        new_ec_file = tweak_one(orig_ec['spec'], None, build_specs)
        new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
        easyconfigs.extend(new_ecs)

    return easyconfigs
def tweak(easyconfigs, build_specs):
    """
    Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs (dicts with 'ec', 'spec', ... keys)
    :param build_specs: dict of build specifications to apply (toolchain, version, ...)
    :return: list of tweaked (re-processed) easyconfigs
    """
    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        # hard failure: logging an error is not enough here, since continuing with mixed
        # toolchains would produce broken tweaked easyconfigs
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)

    # obtain full dependency graph for specified easyconfigs
    # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
    orig_ecs = resolve_dependencies(easyconfigs, retain_all_deps=True)

    # determine toolchain based on last easyconfigs
    toolchain = orig_ecs[-1]['ec']['toolchain']
    _log.debug("Filtering using toolchain %s" % toolchain)

    # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
    if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
        while orig_ecs[0]['ec']['toolchain'] != toolchain:
            orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    easyconfigs = []
    for orig_ec in orig_ecs:
        new_ec_file = tweak_one(orig_ec['spec'], None, build_specs)
        new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
        easyconfigs.extend(new_ecs)

    return easyconfigs
def det_toolchain_element_details(tc, elem):
    """
    Determine details of a particular toolchain element, for a given Toolchain instance.

    :param tc: Toolchain instance to inspect
    :param elem: name of the toolchain element to look up
    :return: dict with details for the requested toolchain element
    """
    tc_dict = tc.as_dict()
    # cache key uniquely identifies a (toolchain, element) combination
    cache_key = (tc_dict['name'], tc_dict['version'] + tc_dict['versionsuffix'], elem)

    # check for cached version first
    if cache_key in _toolchain_details_cache:
        _log.debug("Obtained details for '%s' in toolchain '%s' from cache" % (elem, tc_dict))
        return _toolchain_details_cache[cache_key]

    # grab version from parsed easyconfig file for toolchain
    eb_file = robot_find_easyconfig(tc_dict['name'], det_full_ec_version(tc_dict))
    parsed = process_easyconfig(eb_file, parse_only=True)
    if len(parsed) > 1:
        _log.warning("More than one toolchain specification found for %s, only retaining first" % tc_dict)
        _log.debug("Full list of toolchain specifications: %s" % parsed)
    tc_ec = parsed[0]['ec']

    # scan toolchain dependencies for the requested element
    elem_details = None
    for dep in tc_ec['dependencies']:
        if dep['name'] == elem:
            elem_details = dep
            _log.debug("Found details for toolchain element %s: %s" % (elem, elem_details))
            break

    if elem_details is None:
        if tc_ec['name'] == elem:
            # for compiler-only toolchains, toolchain and compilers are one-and-the-same
            elem_details = tc_ec
        else:
            raise EasyBuildError("No toolchain element '%s' found for toolchain %s: %s", elem, tc.as_dict(), tc_ec)

    _toolchain_details_cache[cache_key] = elem_details
    _log.debug("Obtained details for '%s' in toolchain '%s', added to cache" % (elem, tc_dict))
    return _toolchain_details_cache[cache_key]
def det_toolchain_element_details(tc, elem):
    """
    Determine details of a particular toolchain element, for a given Toolchain instance.

    :param tc: Toolchain instance to inspect
    :param elem: name of the toolchain element to look up
    :return: dict with details for the requested toolchain element
    """
    # check for cached version first
    tc_dict = tc.as_dict()
    key = (tc_dict['name'], tc_dict['version'] + tc_dict['versionsuffix'], elem)
    if key in _toolchain_details_cache:
        _log.debug("Obtained details for '%s' in toolchain '%s' from cache" % (elem, tc_dict))
        return _toolchain_details_cache[key]

    # grab version from parsed easyconfig file for toolchain
    eb_file = robot_find_easyconfig(tc_dict['name'], det_full_ec_version(tc_dict))
    tc_ec = process_easyconfig(eb_file, parse_only=True)
    if len(tc_ec) > 1:
        _log.warning("More than one toolchain specification found for %s, only retaining first" % tc_dict)
        _log.debug("Full list of toolchain specifications: %s" % tc_ec)
    tc_ec = tc_ec[0]['ec']
    tc_deps = tc_ec['dependencies']
    tc_elem_details = None
    for tc_dep in tc_deps:
        if tc_dep['name'] == elem:
            tc_elem_details = tc_dep
            _log.debug("Found details for toolchain element %s: %s" % (elem, tc_elem_details))
            break
    if tc_elem_details is None:
        # for compiler-only toolchains, toolchain and compilers are one-and-the-same
        if tc_ec['name'] == elem:
            tc_elem_details = tc_ec
        else:
            # raise instead of just logging an error: with a non-raising logger, execution
            # would fall through and cache None in _toolchain_details_cache, poisoning
            # all subsequent lookups for this (toolchain, element) key
            raise EasyBuildError("No toolchain element '%s' found for toolchain %s: %s", elem, tc.as_dict(), tc_ec)
    _toolchain_details_cache[key] = tc_elem_details
    _log.debug("Obtained details for '%s' in toolchain '%s', added to cache" % (elem, tc_dict))
    return _toolchain_details_cache[key]
def parse_easyconfigs(paths):
    """
    Parse easyconfig files

    :param paths: list of (path, generated) tuples pointing to easyconfig files
    :return: tuple of (list of parsed easyconfigs, boolean indicating whether any files were generated)
    """
    easyconfigs = []
    generated_ecs = False
    for (path, generated) in paths:
        path = os.path.abspath(path)
        # keep track of whether any files were generated
        generated_ecs |= generated
        if not os.path.exists(path):
            raise EasyBuildError("Can't find path %s", path)
        try:
            ec_files = find_easyconfigs(path, ignore_dirs=build_option('ignore_dirs'))
            for ec_file in ec_files:
                # only pass build specs when not generating easyconfig files
                kwargs = {}
                if not build_option('try_to_generate'):
                    kwargs['build_specs'] = build_option('build_specs')
                ecs = process_easyconfig(ec_file, **kwargs)
                easyconfigs.extend(ecs)
        except IOError as err:  # 'as' syntax works on both Python 2.6+ and Python 3
            raise EasyBuildError("Processing easyconfigs in path %s failed: %s", path, err)

    # generated_ecs was accumulated but never returned; callers need it to know whether
    # any easyconfig files were generated on the fly
    return easyconfigs, generated_ecs
def parse_easyconfigs(paths, validate=True):
    """
    Parse easyconfig files

    :param paths: list of (path, generated) tuples pointing to easyconfig files
    :param validate: whether or not to validate easyconfigs while processing them
    :return: tuple of (list of parsed easyconfigs, boolean indicating whether any files were generated)
    """
    easyconfigs = []
    generated_ecs = False
    for (path, generated) in paths:
        path = os.path.abspath(path)
        # keep track of whether any files were generated
        generated_ecs |= generated
        if not os.path.exists(path):
            raise EasyBuildError("Can't find path %s", path)
        try:
            ec_files = find_easyconfigs(path, ignore_dirs=build_option('ignore_dirs'))
            for ec_file in ec_files:
                kwargs = {'validate': validate}
                # only pass build specs when not generating easyconfig files
                if not build_option('try_to_generate'):
                    kwargs['build_specs'] = build_option('build_specs')
                easyconfigs.extend(process_easyconfig(ec_file, **kwargs))
        except IOError as err:  # 'as' syntax works on both Python 2.6+ and Python 3
            raise EasyBuildError("Processing easyconfigs in path %s failed: %s", path, err)

    # generated_ecs was accumulated but never returned; callers need it to know whether
    # any easyconfig files were generated on the fly
    return easyconfigs, generated_ecs
def test_map_easyconfig_to_target_tc_hierarchy(self):
    """Test mapping of easyconfig to target hierarchy"""
    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    init_config(build_options={
        'robot_path': test_easyconfigs,
        'silent': True,
        'valid_module_classes': module_classes(),
    })
    get_toolchain_hierarchy.clear()
    # map from a GCC-based toolchain to an iccifort-based one
    gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
    iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
    # The below mapping includes a binutils mapping (2.26 to 2.25)
    tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
    ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
    tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping)
    tweaked_ec = process_easyconfig(tweaked_spec)[0]
    tweaked_dict = tweaked_ec['ec'].asdict()
    # First check the mapped toolchain
    key, value = 'toolchain', iccifort_binutils_tc
    self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
    # Also check that binutils has been mapped
    for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
        self.assertTrue(key in tweaked_dict['builddependencies'][0] and
                        value == tweaked_dict['builddependencies'][0][key])
def tweak(easyconfigs, build_specs, modtool, targetdir=None):
    """
    Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs (dicts with 'ec', 'spec', ... keys)
    :param build_specs: dict of build specifications to apply (toolchain, version, ...)
    :param modtool: modules tool instance, used when resolving dependencies
    :param targetdir: target directory for tweaked easyconfig files
    :return: list of tweaked easyconfigs (only those that were listed originally)
    """
    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)

    if 'name' in build_specs or 'version' in build_specs:
        # no recursion if software name/version build specification are included
        # in that case, do not construct full dependency graph
        orig_ecs = easyconfigs
        _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs)
    else:
        # build specifications should be applied to the whole dependency graph
        # obtain full dependency graph for specified easyconfigs
        # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
        _log.debug("Applying build specifications recursively (no software name/version found): %s" % build_specs)
        orig_ecs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True)

    # keep track of originally listed easyconfigs (via their path)
    listed_ec_paths = [ec['spec'] for ec in easyconfigs]

    # NOTE: a second, unconditional resolve_dependencies(...) call used to sit here; it clobbered
    # the result of the if/else above, turning the name/version shortcut into dead code, so it was removed

    # determine toolchain based on last easyconfigs
    toolchain = orig_ecs[-1]['ec']['toolchain']
    _log.debug("Filtering using toolchain %s" % toolchain)

    # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
    if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
        while orig_ecs[0]['ec']['toolchain'] != toolchain:
            orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_easyconfigs = []
    for orig_ec in orig_ecs:
        new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=targetdir)
        # only return tweaked easyconfigs for easyconfigs which were listed originally
        # easyconfig files for dependencies are also generated but not included, and will be resolved via --robot
        if orig_ec['spec'] in listed_ec_paths:
            new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
            tweaked_easyconfigs.extend(new_ecs)

    return tweaked_easyconfigs
def test_find_minimally_resolved_modules(self): """Test find_minimally_resolved_modules function.""" # replace log.experimental with log.warning to allow experimental code easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning test_easyconfigs = os.path.join( os.path.dirname(os.path.abspath(__file__)), 'easyconfigs') init_config( build_options={ 'valid_module_classes': module_classes(), 'robot_path': test_easyconfigs, }) barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb') barec_txt = '\n'.join([ "easyblock = 'ConfigureMake'", "name = 'bar'", "version = '1.2.3'", "homepage = 'http://example.com'", "description = 'foo'", "toolchain = {'name': 'goolf', 'version': '1.4.10'}", # deliberately listing components of toolchain as dependencies without specifying subtoolchains, # to test resolving of dependencies with minimal toolchain # for each of these, we know test easyconfigs are available (which are required here) "dependencies = [", " ('OpenMPI', '1.6.4'),", # available with GCC/4.7.2 " ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),", # available with gompi/1.4.10 " ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),", # available with gompi/1.4.10 " ('SQLite', '3.8.10.2'),", # only available with goolf/1.4.10 "]", ]) write_file(barec, barec_txt) bar = process_easyconfig(barec)[0] ecs = [bar] mods = [ 'gompi/1.4.10', 'goolf/1.4.10', # include modules for dependencies, with subtoolchains rather than full toolchain (except for SQLite) 'OpenMPI/1.6.4-GCC-4.7.2', 'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2', 'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2', 'SQLite/3.8.10.2-GCC-4.7.2', ] ordered_ecs, new_easyconfigs, new_avail_modules = find_minimally_resolved_modules( ecs, mods, []) # all dependencies are resolved for easyconfigs included in ordered_ecs self.assertEqual(len(ordered_ecs), 1) self.assertEqual(ordered_ecs[0]['dependencies'], []) # module is added to list of available modules 
self.assertTrue(bar['ec'].full_mod_name in new_avail_modules) # nothing left self.assertEqual(new_easyconfigs, [])
def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir=None):
    """
    Take an easyconfig spec, parse it, map it to a target toolchain and dump it out

    :param ec_spec: Location of original easyconfig file
    :param toolchain_mapping: Mapping between source toolchain and target toolchain
    :param targetdir: Directory to dump the modified easyconfig file in

    :return: Location of the modified easyconfig file
    """
    # Fully parse the original easyconfig
    parsed_ec = process_easyconfig(ec_spec, validate=False)[0]

    # Replace the toolchain if the mapping exists
    tc_name = parsed_ec['ec']['toolchain']['name']
    if tc_name in toolchain_mapping:
        new_toolchain = toolchain_mapping[tc_name]
        _log.debug("Replacing parent toolchain %s with %s", parsed_ec['ec']['toolchain'], new_toolchain)
        parsed_ec['ec']['toolchain'] = new_toolchain

    # Replace the toolchains of all the dependencies
    for key in DEPENDENCY_PARAMETERS:
        # loop over a *copy* of dependency dicts (with resolved templates);
        # to update the original dep dict, we need to index with idx into self._config[key][0]...
        for idx, dep in enumerate(parsed_ec['ec'][key]):
            # reference to original dep dict, this is the one we should be updating
            orig_dep = parsed_ec['ec']._config[key][0][idx]
            # skip dependencies that are marked as external modules
            if dep['external_module']:
                continue
            dep_tc_name = dep['toolchain']['name']
            if dep_tc_name in toolchain_mapping:
                orig_dep['toolchain'] = toolchain_mapping[dep_tc_name]
            # Replace the binutils version (if necessary)
            if 'binutils' in toolchain_mapping and (dep['name'] == 'binutils' and dep_tc_name == GCCcore.NAME):
                orig_dep.update(toolchain_mapping['binutils'])
            # set module names (recomputed so they reflect the mapped toolchain)
            orig_dep['short_mod_name'] = ActiveMNS().det_short_module_name(dep)
            orig_dep['full_mod_name'] = ActiveMNS().det_full_module_name(dep)

    # Determine the name of the modified easyconfig and dump it to target_dir
    # (falls back to the system temp dir when no targetdir is given)
    ec_filename = '%s-%s.eb' % (parsed_ec['ec']['name'], det_full_ec_version(parsed_ec['ec']))
    tweaked_spec = os.path.join(targetdir or tempfile.gettempdir(), ec_filename)

    parsed_ec['ec'].dump(tweaked_spec, always_overwrite=False, backup=True)
    _log.debug("Dumped easyconfig tweaked via --try-toolchain* to %s", tweaked_spec)

    return tweaked_spec
def map_easyconfig_to_target_tc_hierarchy(ec_spec, toolchain_mapping, targetdir=None):
    """
    Take an easyconfig spec, parse it, map it to a target toolchain and dump it out

    :param ec_spec: Location of original easyconfig file
    :param toolchain_mapping: Mapping between source toolchain and target toolchain
    :param targetdir: Directory to dump the modified easyconfig file in

    :return: Location of the modified easyconfig file
    """
    # Fully parse the original easyconfig
    parsed_ec = process_easyconfig(ec_spec, validate=False)[0]['ec']

    # Replace the toolchain if the mapping exists
    tc_name = parsed_ec['toolchain']['name']
    if tc_name in toolchain_mapping:
        new_toolchain = toolchain_mapping[tc_name]
        _log.debug("Replacing parent toolchain %s with %s", parsed_ec['toolchain'], new_toolchain)
        parsed_ec['toolchain'] = new_toolchain

    # Replace the toolchains of all the dependencies
    for key in DEPENDENCY_PARAMETERS:
        # loop over a *copy* of dependency dicts (with resolved templates);
        # to update the original dep dict, we need to get a reference with templating disabled...
        val = parsed_ec[key]
        orig_val = parsed_ec.get_ref(key)
        if key in parsed_ec.iterate_options:
            # deps may be listed per iteration; flatten both views so indices line up
            val = flatten(val)
            orig_val = flatten(orig_val)
        for idx, dep in enumerate(val):
            # reference to original dep dict, this is the one we should be updating
            orig_dep = orig_val[idx]
            # skip dependencies that are marked as external modules
            if dep['external_module']:
                continue
            dep_tc_name = dep['toolchain']['name']
            if dep_tc_name in toolchain_mapping:
                orig_dep['toolchain'] = toolchain_mapping[dep_tc_name]
            # Replace the binutils version (if necessary)
            if 'binutils' in toolchain_mapping and (dep['name'] == 'binutils' and dep_tc_name == GCCcore.NAME):
                orig_dep.update(toolchain_mapping['binutils'])
            # set module names (recomputed so they reflect the mapped toolchain)
            orig_dep['short_mod_name'] = ActiveMNS().det_short_module_name(dep)
            orig_dep['full_mod_name'] = ActiveMNS().det_full_module_name(dep)

    # Determine the name of the modified easyconfig and dump it to target_dir
    # (falls back to the system temp dir when no targetdir is given)
    ec_filename = '%s-%s.eb' % (parsed_ec['name'], det_full_ec_version(parsed_ec))
    tweaked_spec = os.path.join(targetdir or tempfile.gettempdir(), ec_filename)

    parsed_ec.dump(tweaked_spec, always_overwrite=False, backup=True)
    _log.debug("Dumped easyconfig tweaked via --try-toolchain* to %s", tweaked_spec)

    return tweaked_spec
def test_find_minimally_resolved_modules(self): """Test find_minimally_resolved_modules function.""" # replace log.experimental with log.warning to allow experimental code easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs') init_config(build_options={ 'valid_module_classes': module_classes(), 'robot_path': test_easyconfigs, }) barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb') barec_txt = '\n'.join([ "easyblock = 'ConfigureMake'", "name = 'bar'", "version = '1.2.3'", "homepage = 'http://example.com'", "description = 'foo'", "toolchain = {'name': 'goolf', 'version': '1.4.10'}", # deliberately listing components of toolchain as dependencies without specifying subtoolchains, # to test resolving of dependencies with minimal toolchain # for each of these, we know test easyconfigs are available (which are required here) "dependencies = [", " ('OpenMPI', '1.6.4'),", # available with GCC/4.7.2 " ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),", # available with gompi/1.4.10 " ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),", # available with gompi/1.4.10 " ('SQLite', '3.8.10.2'),", # only available with goolf/1.4.10 "]", ]) write_file(barec, barec_txt) bar = process_easyconfig(barec)[0] ecs = [bar] mods = [ 'gompi/1.4.10', 'goolf/1.4.10', # include modules for dependencies, with subtoolchains rather than full toolchain (except for SQLite) 'OpenMPI/1.6.4-GCC-4.7.2', 'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2', 'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2', 'SQLite/3.8.10.2-GCC-4.7.2', ] ordered_ecs, new_easyconfigs, new_avail_modules = find_minimally_resolved_modules(ecs, mods, []) # all dependencies are resolved for easyconfigs included in ordered_ecs self.assertEqual(len(ordered_ecs), 1) self.assertEqual(ordered_ecs[0]['dependencies'], []) # module is added to list of available modules 
self.assertTrue(bar['ec'].full_mod_name in new_avail_modules) # nothing left self.assertEqual(new_easyconfigs, [])
def tweak(easyconfigs, build_specs, modtool, targetdirs=None):
    """Tweak list of easyconfigs according to provided build specifications."""
    # unpack (prepended, appended) robot-path target dirs when provided
    tweaked_ecs_path, tweaked_ecs_deps_path = None, None
    if targetdirs is not None:
        tweaked_ecs_path, tweaked_ecs_deps_path = targetdirs
    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)

    if 'name' in build_specs or 'version' in build_specs:
        # no recursion if software name/version build specification are included
        # in that case, do not construct full dependency graph
        orig_ecs = easyconfigs
        _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs)
    else:
        # build specifications should be applied to the whole dependency graph
        # obtain full dependency graph for specified easyconfigs
        # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
        _log.debug("Applying build specifications recursively (no software name/version found): %s" % build_specs)
        orig_ecs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True)

    # keep track of originally listed easyconfigs (via their path)
    listed_ec_paths = [ec['spec'] for ec in easyconfigs]

    # determine toolchain based on last easyconfigs
    if orig_ecs:
        toolchain = orig_ecs[-1]['ec']['toolchain']
        _log.debug("Filtering using toolchain %s" % toolchain)

        # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
        if toolchain['name'] != DUMMY_TOOLCHAIN_NAME:
            while orig_ecs[0]['ec']['toolchain'] != toolchain:
                orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_easyconfigs = []
    for orig_ec in orig_ecs:
        # Only return tweaked easyconfigs for easyconfigs which were listed originally on the command line (and use the
        # prepended path so that they are found first).
        # easyconfig files for dependencies are also generated but not included, they will be resolved via --robot
        # either from existing easyconfigs or, if that fails, from easyconfigs in the appended path
        if orig_ec['spec'] in listed_ec_paths:
            new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_path)
            new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
            tweaked_easyconfigs.extend(new_ecs)
        else:
            # Place all tweaked dependency easyconfigs in the directory appended to the robot path
            # (return value intentionally unused: tweak_one writes the file as a side effect)
            new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_deps_path)
    return tweaked_easyconfigs
def tweak(easyconfigs, build_specs, targetdir=None):
    """
    Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs (dicts with 'ec', 'spec', ... keys)
    :param build_specs: dict of build specifications to apply (toolchain, version, ...)
    :param targetdir: target directory for tweaked easyconfig files
    :return: list of tweaked easyconfigs (only those that were listed originally)
    """
    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(["%(name)s/%(version)s" % ec["ec"]["toolchain"] for ec in easyconfigs])
    if len(toolchains) > 1:
        raise EasyBuildError(
            "Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s", toolchains
        )

    if "name" in build_specs or "version" in build_specs:
        # no recursion if software name/version build specification are included
        # in that case, do not construct full dependency graph
        orig_ecs = easyconfigs
        _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs)
    else:
        # build specifications should be applied to the whole dependency graph
        # obtain full dependency graph for specified easyconfigs
        # easyconfigs will be ordered 'top-to-bottom': toolchain dependencies and toolchain first
        _log.debug("Applying build specifications recursively (no software name/version found): %s" % build_specs)
        orig_ecs = resolve_dependencies(easyconfigs, retain_all_deps=True)

    # keep track of originally listed easyconfigs (via their path)
    listed_ec_paths = [ec["spec"] for ec in easyconfigs]

    # NOTE: a second, unconditional resolve_dependencies(...) call used to sit here; it clobbered
    # the result of the if/else above, turning the name/version shortcut into dead code, so it was removed

    # determine toolchain based on last easyconfigs
    toolchain = orig_ecs[-1]["ec"]["toolchain"]
    _log.debug("Filtering using toolchain %s" % toolchain)

    # filter easyconfigs unless a dummy toolchain is used: drop toolchain and toolchain dependencies
    if toolchain["name"] != DUMMY_TOOLCHAIN_NAME:
        while orig_ecs[0]["ec"]["toolchain"] != toolchain:
            orig_ecs = orig_ecs[1:]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_easyconfigs = []
    for orig_ec in orig_ecs:
        new_ec_file = tweak_one(orig_ec["spec"], None, build_specs, targetdir=targetdir)
        # only return tweaked easyconfigs for easyconfigs which were listed originally
        # easyconfig files for dependencies are also generated but not included, and will be resolved via --robot
        if orig_ec["spec"] in listed_ec_paths:
            new_ecs = process_easyconfig(new_ec_file, build_specs=build_specs)
            tweaked_easyconfigs.extend(new_ecs)

    return tweaked_easyconfigs
def get_dep_tree_of_toolchain(toolchain_spec, modtool):
    """
    Get list of dependencies of a toolchain (as EasyConfig objects)

    :param toolchain_spec: toolchain spec to get the dependencies of
    :param modtool: module tool used

    :return: The dependency tree of the toolchain spec
    """
    tc_name, tc_version = toolchain_spec['name'], toolchain_spec['version']
    # locate the easyconfig file that defines the requested toolchain version
    ec_path = robot_find_easyconfig(tc_name, tc_version)
    if ec_path is None:
        raise EasyBuildError("Could not find easyconfig for %s toolchain version %s", tc_name, tc_version)
    parsed = process_easyconfig(ec_path, validate=False)
    # retain *all* dependencies to obtain the full tree
    resolved = resolve_dependencies(parsed, modtool, retain_all_deps=True)
    return [entry['ec'] for entry in resolved]
def get_dep_tree_of_toolchain(toolchain_spec, modtool):
    """
    Get list of dependencies of a toolchain (as EasyConfig objects)

    :param toolchain_spec: toolchain spec to get the dependencies of
    :param modtool: module tool used

    :return: The dependency tree of the toolchain spec
    """
    # find the easyconfig for this toolchain; bail out if there is none
    path = robot_find_easyconfig(toolchain_spec['name'], toolchain_spec['version'])
    if path is None:
        raise EasyBuildError("Could not find easyconfig for %s toolchain version %s", toolchain_spec['name'],
                             toolchain_spec['version'])
    deps = []
    # resolve with retain_all_deps=True so already-installed dependencies are kept in the tree
    for dep in resolve_dependencies(process_easyconfig(path, validate=False), modtool, retain_all_deps=True):
        deps.append(dep['ec'])
    return deps
def test_find_potential_version_mappings(self):
    """Test ability to find potential version mappings of a dependency for a given toolchain mapping"""
    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    init_config(build_options={
        'robot_path': [test_easyconfigs],
        'silent': True,
        'valid_module_classes': module_classes(),
    })
    get_toolchain_hierarchy.clear()
    # map from a GCC-based toolchain to an iccifort-based one
    gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
    iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
    # The below mapping includes a binutils mapping (2.26 to 2.25)
    tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
    ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
    parsed_ec = process_easyconfig(ec_spec)[0]
    # pick out the gzip dependency of hwloc, which should have a newer version in the target toolchain
    gzip_dep = [dep for dep in parsed_ec['ec']['dependencies'] if dep['name'] == 'gzip'][0]
    self.assertEqual(gzip_dep['full_mod_name'], 'gzip/1.4-GCC-4.9.3-2.26')
    potential_versions = find_potential_version_mappings(gzip_dep, tc_mapping)
    self.assertEqual(len(potential_versions), 1)
    # Should see version 1.6 of gzip with iccifort toolchain
    expected = {
        'path': os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.6-iccifort-2016.1.150-GCC-4.9.3-2.25.eb'),
        'toolchain': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
        'version': '1.6',
    }
    self.assertEqual(potential_versions[0], expected)
def test_map_easyconfig_to_target_tc_hierarchy(self):
    """Test mapping of easyconfig to target hierarchy"""
    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    init_config(build_options={
        'robot_path': test_easyconfigs,
        'silent': True,
        'valid_module_classes': module_classes(),
    })
    get_toolchain_hierarchy.clear()
    # map from a GCC-based toolchain to an iccifort-based one
    gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
    iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
    # The below mapping includes a binutils mapping (2.26 to 2.25)
    tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
    ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
    tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping)
    tweaked_ec = process_easyconfig(tweaked_spec)[0]
    tweaked_dict = tweaked_ec['ec'].asdict()
    # First check the mapped toolchain
    key, value = 'toolchain', iccifort_binutils_tc
    self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
    # Also check that binutils has been mapped
    for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
        self.assertTrue(key in tweaked_dict['builddependencies'][0] and
                        value == tweaked_dict['builddependencies'][0][key])
def parse_easyconfigs(paths):
    """
    Parse easyconfig files

    :param paths: list of (path, generated) tuples pointing to easyconfig files
    :return: tuple of (list of parsed easyconfigs, boolean indicating whether any files were generated)
    """
    easyconfigs = []
    generated_ecs = False
    for (path, generated) in paths:
        path = os.path.abspath(path)
        # keep track of whether any files were generated
        generated_ecs |= generated
        if not os.path.exists(path):
            # raise instead of only logging an error, so processing halts on a broken path
            # (consistent with the other parse_easyconfigs variants in this file)
            raise EasyBuildError("Can't find path %s", path)
        try:
            ec_files = find_easyconfigs(path, ignore_dirs=build_option('ignore_dirs'))
            for ec_file in ec_files:
                # only pass build specs when not generating easyconfig files
                kwargs = {}
                if not build_option('try_to_generate'):
                    kwargs['build_specs'] = build_option('build_specs')
                ecs = process_easyconfig(ec_file, **kwargs)
                easyconfigs.extend(ecs)
        except IOError as err:  # 'as' syntax works on both Python 2.6+ and Python 3
            raise EasyBuildError("Processing easyconfigs in path %s failed: %s", path, err)

    # generated_ecs was accumulated but never returned; callers need it to know whether
    # any easyconfig files were generated on the fly
    return easyconfigs, generated_ecs
def test_map_easyconfig_to_target_tc_hierarchy(self):
    """Test mapping of easyconfig to target hierarchy"""
    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    build_options = {
        'robot_path': [test_easyconfigs],
        'silent': True,
        'valid_module_classes': module_classes(),
    }
    init_config(build_options=build_options)
    get_toolchain_hierarchy.clear()
    # map from a GCC-based toolchain to an iccifort-based one
    gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
    iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
    # The below mapping includes a binutils mapping (2.26 to 2.25)
    tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
    ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
    tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping)
    tweaked_ec = process_easyconfig(tweaked_spec)[0]
    tweaked_dict = tweaked_ec['ec'].asdict()
    # First check the mapped toolchain
    key, value = 'toolchain', iccifort_binutils_tc
    self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
    # Also check that binutils has been mapped
    for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
        self.assertTrue(key in tweaked_dict['builddependencies'][0] and
                        value == tweaked_dict['builddependencies'][0][key])

    # Now test the case where we try to update the dependencies
    init_config(build_options=build_options)
    get_toolchain_hierarchy.clear()
    tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping, update_dep_versions=True)
    tweaked_ec = process_easyconfig(tweaked_spec)[0]
    tweaked_dict = tweaked_ec['ec'].asdict()
    # First check the mapped toolchain
    key, value = 'toolchain', iccifort_binutils_tc
    self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
    # Also check that binutils has been mapped
    for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
        self.assertTrue(
            key in tweaked_dict['builddependencies'][0] and value == tweaked_dict['builddependencies'][0][key]
        )
    # Also check that the gzip dependency was upgraded
    for key, value in {'name': 'gzip', 'version': '1.6', 'versionsuffix': ''}.items():
        self.assertTrue(key in tweaked_dict['dependencies'][0] and value == tweaked_dict['dependencies'][0][key])

    # Make sure there are checksums for our next test
    self.assertTrue(tweaked_dict['checksums'])

    # Test the case where we also update the software version at the same time
    init_config(build_options=build_options)
    get_toolchain_hierarchy.clear()
    new_version = '1.x.3'
    tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping,
                                                         update_build_specs={'version': new_version},
                                                         update_dep_versions=True)
    tweaked_ec = process_easyconfig(tweaked_spec)[0]
    tweaked_dict = tweaked_ec['ec'].asdict()
    # First check the mapped toolchain
    key, value = 'toolchain', iccifort_binutils_tc
    self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
    # Also check that binutils has been mapped
    for key, value in {'name': 'binutils', 'version': '2.25', 'versionsuffix': ''}.items():
        self.assertTrue(
            key in tweaked_dict['builddependencies'][0] and value == tweaked_dict['builddependencies'][0][key]
        )
    # Also check that the gzip dependency was upgraded
    for key, value in {'name': 'gzip', 'version': '1.6', 'versionsuffix': ''}.items():
        self.assertTrue(key in tweaked_dict['dependencies'][0] and value == tweaked_dict['dependencies'][0][key])

    # Finally check that the version was upgraded
    key, value = 'version', new_version
    self.assertTrue(key in tweaked_dict and value == tweaked_dict[key])
    # and that the checksum was removed
    self.assertFalse(tweaked_dict['checksums'])

    # Check that if we update a software version, it also updates the version if the software appears in an
    # extension list (like for a PythonBundle)
    ec_spec = os.path.join(test_easyconfigs, 't', 'toy', 'toy-0.0-gompi-2018a-test.eb')
    # Create the trivial toolchain mapping
    toolchain = {'name': 'gompi', 'version': '2018a'}
    tc_mapping = map_toolchain_hierarchies(toolchain, toolchain, self.modtool)
    # Update the software version
    init_config(build_options=build_options)
    get_toolchain_hierarchy.clear()
    new_version = '1.x.3'
    tweaked_spec = map_easyconfig_to_target_tc_hierarchy(ec_spec, tc_mapping,
                                                         update_build_specs={'version': new_version},
                                                         update_dep_versions=False)
    tweaked_ec = process_easyconfig(tweaked_spec)[0]
    extensions = tweaked_ec['ec']['exts_list']
    # check one extension with the same name exists and that the version has been updated
    hit_extension = 0
    for extension in extensions:
        if isinstance(extension, tuple) and extension[0] == 'toy':
            self.assertEqual(extension[1], new_version)
            # Make sure checksum has been purged
            self.assertFalse('checksums' in extension[2])
            hit_extension += 1
    self.assertEqual(hit_extension, 1, "Should only have updated one extension")
def test_find_potential_version_mappings(self):
    """Test ability to find potential version mappings of a dependency for a given toolchain mapping"""
    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    init_config(build_options={
        'robot_path': [test_easyconfigs],
        'silent': True,
        'valid_module_classes': module_classes(),
    })
    # toolchain hierarchies are cached, clear to avoid interference from earlier tests
    get_toolchain_hierarchy.clear()
    gcc_binutils_tc = {'name': 'GCC', 'version': '4.9.3-2.26'}
    iccifort_binutils_tc = {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'}
    # The below mapping includes a binutils mapping (2.26 to 2.25)
    tc_mapping = map_toolchain_hierarchies(gcc_binutils_tc, iccifort_binutils_tc, self.modtool)
    ec_spec = os.path.join(test_easyconfigs, 'h', 'hwloc', 'hwloc-1.6.2-GCC-4.9.3-2.26.eb')
    parsed_ec = process_easyconfig(ec_spec)[0]
    # hwloc easyconfig has a gzip dependency (gzip/1.4 with the GCC toolchain)
    gzip_dep = [dep for dep in parsed_ec['ec']['dependencies'] if dep['name'] == 'gzip'][0]
    self.assertEqual(gzip_dep['full_mod_name'], 'gzip/1.4-GCC-4.9.3-2.26')

    potential_versions = find_potential_version_mappings(gzip_dep, tc_mapping)
    self.assertEqual(len(potential_versions), 1)
    # Should see version 1.6 of gzip with iccifort toolchain
    expected = {
        'path': os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.6-iccifort-2016.1.150-GCC-4.9.3-2.25.eb'),
        'toolchain': {'name': 'iccifort', 'version': '2016.1.150-GCC-4.9.3-2.25'},
        'version': '1.6',
        'versionsuffix': '',
    }
    self.assertEqual(potential_versions[0], expected)

    # Test that we can override respecting the versionsuffix

    # Create toolchain mapping for OpenBLAS
    gcc_4_tc = {'name': 'GCC', 'version': '4.8.2'}
    gcc_6_tc = {'name': 'GCC', 'version': '6.4.0-2.28'}
    tc_mapping = map_toolchain_hierarchies(gcc_4_tc, gcc_6_tc, self.modtool)
    # Create a dep with the necessary params (including versionsuffix)
    openblas_dep = {
        'toolchain': {'version': '4.8.2', 'name': 'GCC'},
        'name': 'OpenBLAS',
        'system': False,
        'versionsuffix': '-LAPACK-3.4.2',
        'version': '0.2.8'
    }

    # by default, candidates with a different versionsuffix are rejected, but a warning is printed on stderr
    self.mock_stderr(True)
    potential_versions = find_potential_version_mappings(openblas_dep, tc_mapping)
    errtxt = self.get_stderr()
    warning_stub = "\nWARNING: There may be newer version(s) of dep 'OpenBLAS' available with a different " \
                   "versionsuffix to '-LAPACK-3.4.2'"
    self.mock_stderr(False)
    self.assertTrue(errtxt.startswith(warning_stub))
    self.assertEqual(len(potential_versions), 0)

    # with ignore_versionsuffixes enabled, the candidate with empty versionsuffix is accepted
    potential_versions = find_potential_version_mappings(openblas_dep, tc_mapping, ignore_versionsuffixes=True)
    self.assertEqual(len(potential_versions), 1)
    expected = {
        'path': os.path.join(test_easyconfigs, 'o', 'OpenBLAS', 'OpenBLAS-0.2.20-GCC-6.4.0-2.28.eb'),
        'toolchain': {'version': '6.4.0-2.28', 'name': 'GCC'},
        'version': '0.2.20',
        'versionsuffix': '',
    }
    self.assertEqual(potential_versions[0], expected)
def resolve_dependencies(easyconfigs, modtool, retain_all_deps=False, raise_error_missing_ecs=True):
    """
    Work through the list of easyconfigs to determine an optimal order

    :param easyconfigs: list of easyconfigs
    :param modtool: ModulesTool instance to use
    :param retain_all_deps: boolean indicating whether all dependencies must be retained, regardless of availability;
                            retain all deps when True, check matching build option when False
    :param raise_error_missing_ecs: raise an error when one or more easyconfig files could not be found
    :return: list of easyconfigs in dependency-resolved order (dependencies before the easyconfigs requiring them)
    """
    robot = build_option('robot_path')
    # retain all dependencies if specified by either the resp. build option or the dedicated named argument
    retain_all_deps = build_option('retain_all_deps') or retain_all_deps

    avail_modules = modtool.available()
    if retain_all_deps:
        # assume that no modules are available when forced, to retain all dependencies
        avail_modules = []
        _log.info("Forcing all dependencies to be retained.")
    else:
        if len(avail_modules) == 0:
            _log.warning("No installed modules. Your MODULEPATH is probably incomplete: %s" % os.getenv('MODULEPATH'))

    ordered_ecs = []
    # all available modules can be used for resolving dependencies except those that will be installed
    being_installed = [p['full_mod_name'] for p in easyconfigs]
    avail_modules = [m for m in avail_modules if m not in being_installed]

    _log.debug('easyconfigs before resolving deps: %s', easyconfigs)

    # deps for which no easyconfig and no module exist vs deps with a module but no easyconfig file
    totally_missing, missing_easyconfigs = [], []

    # resolve all dependencies, put a safeguard in place to avoid an infinite loop (shouldn't occur though)
    loopcnt = 0
    maxloopcnt = 10000
    while easyconfigs:
        # make sure this stops, we really don't want to get stuck in an infinite loop
        loopcnt += 1
        if loopcnt > maxloopcnt:
            raise EasyBuildError("Maximum loop cnt %s reached, so quitting (easyconfigs: %s, missing_easyconfigs: %s)",
                                 maxloopcnt, easyconfigs, missing_easyconfigs)

        # first try resolving dependencies without using external dependencies;
        # keep iterating until no new modules become resolvable (fixed point)
        last_processed_count = -1
        while len(avail_modules) > last_processed_count:
            last_processed_count = len(avail_modules)
            res = find_resolved_modules(easyconfigs, avail_modules, modtool, retain_all_deps=retain_all_deps)
            resolved_ecs, easyconfigs, avail_modules = res
            ordered_ec_mod_names = [x['full_mod_name'] for x in ordered_ecs]
            for ec in resolved_ecs:
                # only add easyconfig if it's not included yet (based on module name)
                if not ec['full_mod_name'] in ordered_ec_mod_names:
                    ordered_ecs.append(ec)

        # dependencies marked as external modules should be resolved via available modules at this point
        missing_external_modules = [d['full_mod_name'] for ec in easyconfigs for d in ec['dependencies']
                                    if d.get('external_module', False)]
        if missing_external_modules:
            raise EasyBuildError("Missing modules for dependencies marked as external modules: %s",
                                 ', '.join(missing_external_modules))

        # robot: look for existing dependencies, add them
        if robot and easyconfigs:
            # rely on EasyBuild module naming scheme when resolving dependencies, since we know that will
            # generate sensible module names that include the necessary information for the resolution to work
            # (name, version, toolchain, versionsuffix)
            being_installed = [EasyBuildMNS().det_full_module_name(p['ec']) for p in easyconfigs]

            additional = []
            for entry in easyconfigs:
                # do not choose an entry that is being installed in the current run
                # if they depend, you probably want to rebuild them using the new dependency
                deps = entry['dependencies']
                candidates = [d for d in deps if not EasyBuildMNS().det_full_module_name(d) in being_installed]
                if candidates:
                    # only handle the first candidate dependency here; remaining ones are picked up
                    # on subsequent iterations of the outer while loop
                    cand_dep = candidates[0]
                    # find easyconfig, might not find any
                    _log.debug("Looking for easyconfig for %s" % str(cand_dep))
                    # note: robot_find_easyconfig may return None
                    path = robot_find_easyconfig(cand_dep['name'], det_full_ec_version(cand_dep))

                    if path is None:
                        full_mod_name = ActiveMNS().det_full_module_name(cand_dep)

                        # no easyconfig found + no module available => missing dependency
                        if not modtool.exist([full_mod_name])[0]:
                            if cand_dep not in totally_missing:
                                totally_missing.append(cand_dep)

                        # no easyconfig found for dependency, but module is available
                        # => add to list of missing easyconfigs
                        elif cand_dep not in missing_easyconfigs:
                            _log.debug("Irresolvable dependency found (no easyconfig file): %s", cand_dep)
                            missing_easyconfigs.append(cand_dep)

                        # remove irresolvable dependency from list of dependencies so we can continue
                        entry['dependencies'].remove(cand_dep)

                        # add dummy entry for this dependency, so --dry-run for example can still report the dep
                        additional.append({
                            'dependencies': [],
                            'ec': None,
                            'full_mod_name': full_mod_name,
                            'spec': None,
                        })
                    else:
                        _log.info("Robot: resolving dependency %s with %s" % (cand_dep, path))
                        # build specs should not be passed down to resolved dependencies,
                        # to avoid that e.g. --try-toolchain trickles down into the used toolchain itself
                        hidden = cand_dep.get('hidden', False)
                        processed_ecs = process_easyconfig(path, validate=not retain_all_deps, hidden=hidden)

                        # ensure that selected easyconfig provides required dependency
                        verify_easyconfig_filename(path, cand_dep, parsed_ec=processed_ecs)

                        for ec in processed_ecs:
                            if ec not in easyconfigs + additional:
                                additional.append(ec)
                                _log.debug("Added %s as dependency of %s" % (ec, entry))
                else:
                    mod_name = EasyBuildMNS().det_full_module_name(entry['ec'])
                    _log.debug("No more candidate dependencies to resolve for %s" % mod_name)

            # add additional (new) easyconfigs to list of stuff to process
            easyconfigs.extend(additional)
            _log.debug("Unprocessed dependencies: %s", easyconfigs)

        elif not robot:
            # no use in continuing if robot is not enabled, dependencies won't be resolved anyway
            missing_deps = [dep for x in easyconfigs for dep in x['dependencies']]
            if missing_deps:
                raise_error_missing_deps(missing_deps, extra_msg="enable dependency resolution via --robot?")

    if totally_missing:
        raise_error_missing_deps(totally_missing, extra_msg="no easyconfig file or existing module found")

    if missing_easyconfigs:
        if raise_error_missing_ecs:
            raise_error_missing_deps(missing_easyconfigs, extra_msg="no easyconfig file found in robot search path")
        else:
            _log.warning("No easyconfig files found for: %s", missing_easyconfigs)

    _log.info("Dependency resolution complete, building as follows: %s", ordered_ecs)
    return ordered_ecs
def test_resolve_dependencies_minimal(self):
    """Test resolved dependencies with minimal toolchain."""
    # replace log.experimental with log.warning to allow experimental code
    easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning

    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs', 'test_ecs')
    self.install_mock_module()

    init_config(build_options={
        'allow_modules_tool_mismatch': True,
        'minimal_toolchains': True,
        'use_existing_modules': True,
        'external_modules_metadata': ConfigObj(),
        'robot_path': test_easyconfigs,
        'valid_module_classes': module_classes(),
        'validate': False,
    })

    barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
    barec_lines = [
        "easyblock = 'ConfigureMake'",
        "name = 'bar'",
        "version = '1.2.3'",
        "homepage = 'http://example.com'",
        "description = 'foo'",
        # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
        # to test resolving of dependencies with minimal toolchain
        # for each of these, we know test easyconfigs are available (which are required here)
        "dependencies = [",
        " ('OpenMPI', '1.6.4'),",  # available with GCC/4.7.2
        " ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),",  # available with gompi/1.4.10
        " ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),",  # available with gompi/1.4.10
        " ('SQLite', '3.8.10.2'),",
        "]",
        # toolchain as list line, for easy modification later;
        # the use of %(version_major)s here is mainly to check if templates are being handled correctly
        # (it doesn't make much sense, but it serves the purpose)
        "toolchain = {'name': 'goolf', 'version': '%(version_major)s.4.10'}",
    ]
    write_file(barec, '\n'.join(barec_lines))
    bar = process_easyconfig(barec)[0]

    # all modules in the dep graph, in order
    all_mods_ordered = [
        'GCC/4.7.2',
        'hwloc/1.6.2-GCC-4.7.2',
        'OpenMPI/1.6.4-GCC-4.7.2',
        'gompi/1.4.10',
        'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
        'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
        'SQLite/3.8.10.2-GCC-4.7.2',
        'FFTW/3.3.3-gompi-1.4.10',
        'goolf/1.4.10',
        'bar/1.2.3-goolf-1.4.10',
    ]

    # no modules available, so all dependencies are retained
    MockModule.avail_modules = []
    res = resolve_dependencies([bar], self.modtool)
    self.assertEqual(len(res), 10)
    self.assertEqual([x['full_mod_name'] for x in res], all_mods_ordered)

    MockModule.avail_modules = [
        'GCC/4.7.2',
        'gompi/1.4.10',
        'goolf/1.4.10',
        'OpenMPI/1.6.4-GCC-4.7.2',
        'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
        'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
        'SQLite/3.8.10.2-GCC-4.7.2',
    ]

    # test resolving dependencies with minimal toolchain (rather than using goolf/1.4.10 for all of them)
    # existing modules are *not* taken into account when determining minimal subtoolchain, by default
    res = resolve_dependencies([bar], self.modtool)
    self.assertEqual(len(res), 1)
    self.assertEqual(res[0]['full_mod_name'], bar['ec'].full_mod_name)

    # test retaining all dependencies, regardless of whether modules are available or not
    res = resolve_dependencies([bar], self.modtool, retain_all_deps=True)
    self.assertEqual(len(res), 10)
    mods = [x['full_mod_name'] for x in res]
    self.assertEqual(mods, all_mods_ordered)
    self.assertTrue('SQLite/3.8.10.2-GCC-4.7.2' in mods)

    # test taking into account existing modules
    # with an SQLite module with goolf/1.4.10 in place, this toolchain should be used rather than GCC/4.7.2
    MockModule.avail_modules = [
        'SQLite/3.8.10.2-goolf-1.4.10',
    ]

    # parsed easyconfigs are cached, so clear the cache before reprocessing easyconfigs
    ecec._easyconfigs_cache.clear()
    bar = process_easyconfig(barec)[0]
    res = resolve_dependencies([bar], self.modtool, retain_all_deps=True)
    self.assertEqual(len(res), 10)
    mods = [x['full_mod_name'] for x in res]
    self.assertTrue('SQLite/3.8.10.2-goolf-1.4.10' in mods)
    self.assertFalse('SQLite/3.8.10.2-GCC-4.7.2' in mods)

    # Check whether having 2 version of dummy toolchain is ok
    # Clear easyconfig and toolchain caches
    ecec._easyconfigs_cache.clear()
    get_toolchain_hierarchy.clear()

    init_config(build_options={
        'allow_modules_tool_mismatch': True,
        'minimal_toolchains': True,
        'add_dummy_to_minimal_toolchains': True,
        'external_modules_metadata': ConfigObj(),
        'robot_path': test_easyconfigs,
        'valid_module_classes': module_classes(),
        'validate': False,
    })

    # sanity check: the two dummy-toolchain variants used below really exist in the test easyconfigs
    impi_txt = read_file(os.path.join(test_easyconfigs, 'i', 'impi', 'impi-4.1.3.049.eb'))
    self.assertTrue(re.search("^toolchain = {'name': 'dummy', 'version': ''}", impi_txt, re.M))
    gzip_txt = read_file(os.path.join(test_easyconfigs, 'g', 'gzip', 'gzip-1.4.eb'))
    self.assertTrue(re.search("^toolchain = {'name': 'dummy', 'version': 'dummy'}", gzip_txt, re.M))

    barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
    barec_lines = [
        "easyblock = 'ConfigureMake'",
        "name = 'bar'",
        "version = '1.2.3'",
        "homepage = 'http://example.com'",
        "description = 'foo'",
        # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
        # to test resolving of dependencies with minimal toolchain
        # for each of these, we know test easyconfigs are available (which are required here)
        "dependencies = [",
        " ('impi', '4.1.3.049'),",  # has toolchain ('dummy', '')
        " ('gzip', '1.4'),",  # has toolchain ('dummy', 'dummy')
        "]",
        # toolchain as list line, for easy modification later
        "toolchain = {'name': 'goolf', 'version': '1.4.10'}",
    ]
    write_file(barec, '\n'.join(barec_lines))
    bar = process_easyconfig(barec)[0]
    res = resolve_dependencies([bar], self.modtool, retain_all_deps=True)
    self.assertEqual(len(res), 11)
    mods = [x['full_mod_name'] for x in res]
    self.assertTrue('impi/4.1.3.049' in mods)
    self.assertTrue('gzip/1.4' in mods)
def test_resolve_dependencies_minimal(self):
    """Test resolved dependencies with minimal toolchain."""
    # NOTE(review): this is an older variant of the same-named test above, using the legacy
    # resolve_dependencies API (minimal_toolchains/use_existing_modules keyword arguments
    # instead of build options + explicit modtool); defining it twice means only the later
    # definition is effective at class level — confirm which one is intended to be kept.

    # replace log.experimental with log.warning to allow experimental code
    easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning

    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
    install_mock_module()

    init_config(build_options={
        'allow_modules_tool_mismatch': True,
        'external_modules_metadata': ConfigObj(),
        'robot_path': test_easyconfigs,
        'valid_module_classes': module_classes(),
        'validate': False,
    })

    barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
    barec_lines = [
        "easyblock = 'ConfigureMake'",
        "name = 'bar'",
        "version = '1.2.3'",
        "homepage = 'http://example.com'",
        "description = 'foo'",
        # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
        # to test resolving of dependencies with minimal toolchain
        # for each of these, we know test easyconfigs are available (which are required here)
        "dependencies = [",
        " ('OpenMPI', '1.6.4'),",  # available with GCC/4.7.2
        " ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),",  # available with gompi/1.4.10
        " ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),",  # available with gompi/1.4.10
        " ('SQLite', '3.8.10.2'),",
        "]",
        # toolchain as list line, for easy modification later
        "toolchain = {'name': 'goolf', 'version': '1.4.10'}",
    ]
    write_file(barec, '\n'.join(barec_lines))
    bar = process_easyconfig(barec)[0]

    # all modules in the dep graph, in order
    all_mods_ordered = [
        'GCC/4.7.2',
        'hwloc/1.6.2-GCC-4.7.2',
        'OpenMPI/1.6.4-GCC-4.7.2',
        'gompi/1.4.10',
        'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
        'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
        'SQLite/3.8.10.2-GCC-4.7.2',
        'FFTW/3.3.3-gompi-1.4.10',
        'goolf/1.4.10',
        'bar/1.2.3-goolf-1.4.10',
    ]

    # no modules available, so all dependencies are retained
    MockModule.avail_modules = []
    res = resolve_dependencies([bar], minimal_toolchains=True)
    self.assertEqual(len(res), 10)
    self.assertEqual([x['full_mod_name'] for x in res], all_mods_ordered)

    # cleanup
    shutil.rmtree(os.path.join(tempfile.gettempdir(), 'minimal-easyconfigs'))

    MockModule.avail_modules = [
        'GCC/4.7.2',
        'gompi/1.4.10',
        'goolf/1.4.10',
        'OpenMPI/1.6.4-GCC-4.7.2',
        'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
        'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
        'SQLite/3.8.10.2-GCC-4.7.2',
    ]

    # test resolving dependencies with minimal toolchain (rather than using goolf/1.4.10 for all of them)
    # existing modules are *not* taken into account when determining minimal subtoolchain, by default
    res = resolve_dependencies([bar], minimal_toolchains=True)
    self.assertEqual(len(res), 1)
    self.assertEqual(res[0]['full_mod_name'], bar['ec'].full_mod_name)

    # cleanup
    shutil.rmtree(os.path.join(tempfile.gettempdir(), 'minimal-easyconfigs'))

    # test retaining all dependencies, regardless of whether modules are available or not
    res = resolve_dependencies([bar], minimal_toolchains=True, retain_all_deps=True)
    self.assertEqual(len(res), 10)
    mods = [x['full_mod_name'] for x in res]
    self.assertEqual(mods, all_mods_ordered)
    self.assertTrue('SQLite/3.8.10.2-GCC-4.7.2' in mods)

    # cleanup
    shutil.rmtree(os.path.join(tempfile.gettempdir(), 'minimal-easyconfigs'))

    # test taking into account existing modules
    # with an SQLite module with goolf/1.4.10 in place, this toolchain should be used rather than GCC/4.7.2
    MockModule.avail_modules = [
        'SQLite/3.8.10.2-goolf-1.4.10',
    ]
    res = resolve_dependencies([bar], minimal_toolchains=True, retain_all_deps=True, use_existing_modules=True)
    self.assertEqual(len(res), 10)
    mods = [x['full_mod_name'] for x in res]
    self.assertTrue('SQLite/3.8.10.2-goolf-1.4.10' in mods)
    self.assertFalse('SQLite/3.8.10.2-GCC-4.7.2' in mods)
def test_toolchain_external_modules(self):
    """Test use of Toolchain easyblock with external modules."""
    external_modules = ['gcc/8.3.0', 'openmpi/4.0.2', 'openblas/0.3.7', 'fftw/3.3.8', 'scalapack/2.0.2']
    external_modules_metadata = {
        # all metadata for gcc/8.3.0
        'gcc/8.3.0': {
            'name': ['GCC'],
            'version': ['8.3.0'],
            'prefix': '/software/gcc/8.3.0',
        },
        # only name/version for openmpi/4.0.2
        'openmpi/4.0.2': {
            'name': ['OpenMPI'],
            'version': ['4.0.2'],
        },
        # only name/prefix for openblas/0.3.7
        'openblas/0.3.7': {
            'name': ['OpenBLAS'],
            'prefix': '/software/openblas/0.3.7',
        },
        # only version/prefix for fftw/3.3.8 (no name)
        'fftw/3.3.8': {
            'version': ['3.3.8'],
            'prefix': '/software/fftw/3.3.8',
        },
        # no metadata for scalapack/2.0.2
    }

    # initialize configuration
    cleanup()
    eb_go = eboptions.parse_options(args=['--installpath=%s' % self.tmpdir])
    config.init(eb_go.options, eb_go.get_options_by_section('config'))
    build_options = {
        'external_modules_metadata': external_modules_metadata,
        'valid_module_classes': config.module_classes(),
    }
    config.init_build_options(build_options=build_options)
    set_tmpdir()
    del eb_go

    modtool = modules_tool()

    # make sure no $EBROOT* or $EBVERSION* environment variables are set in current environment;
    # iterate over a snapshot of the keys: deleting from os.environ while iterating over it
    # directly raises "RuntimeError: dictionary changed size during iteration" on Python 3
    for key in list(os.environ):
        if any(key.startswith(x) for x in ['EBROOT', 'EBVERSION']):
            del os.environ[key]

    # create dummy module file for each of the external modules
    test_mod_path = os.path.join(self.tmpdir, 'modules', 'all')
    for mod in external_modules:
        write_file(os.path.join(test_mod_path, mod), "#%Module")

    modtool.use(test_mod_path)

    # test easyconfig file to install toolchain that uses external modules,
    # and enables set_env_external_modules
    test_ec_path = os.path.join(self.tmpdir, 'test.eb')
    test_ec_txt = '\n'.join([
        "easyblock = 'Toolchain'",
        "name = 'test-toolchain'",
        "version = '1.2.3'",
        "homepage = 'https://example.com'",
        "description = 'just a test'",
        "toolchain = SYSTEM",
        "dependencies = [",
        " ('gcc/8.3.0', EXTERNAL_MODULE),",
        " ('openmpi/4.0.2', EXTERNAL_MODULE),",
        " ('openblas/0.3.7', EXTERNAL_MODULE),",
        " ('fftw/3.3.8', EXTERNAL_MODULE),",
        " ('scalapack/2.0.2', EXTERNAL_MODULE),",
        "]",
        "set_env_external_modules = True",
        "moduleclass = 'toolchain'",
    ])
    write_file(test_ec_path, test_ec_txt)
    test_ec = process_easyconfig(test_ec_path)[0]

    # create easyblock & install module via run_all_steps
    tc_inst = get_easyblock_instance(test_ec)
    self.assertTrue(isinstance(tc_inst, Toolchain))
    self.mock_stdout(True)
    tc_inst.run_all_steps(False)
    self.mock_stdout(False)

    # make sure expected module file exists
    test_mod = os.path.join(test_mod_path, 'test-toolchain', '1.2.3')
    if get_module_syntax() == 'Lua':
        test_mod += '.lua'
    self.assertTrue(os.path.exists(test_mod))

    # load test-toolchain/1.2.3 module to get environment variable to check for defined
    modtool.load(['test-toolchain/1.2.3'])

    # check whether expected environment variables are defined
    self.assertEqual(os.environ.pop('EBROOTGCC'), '/software/gcc/8.3.0')
    self.assertEqual(os.environ.pop('EBVERSIONGCC'), '8.3.0')
    self.assertEqual(os.environ.pop('EBVERSIONOPENMPI'), '4.0.2')
    self.assertEqual(os.environ.pop('EBROOTOPENBLAS'), '/software/openblas/0.3.7')

    # check that $EBROOT*/$EBVERSION* variables are *not* defined when metadata is incomplete;
    # BUGFIX: a comma was missing after 'EBVERSIONOPENBLAS', so implicit string concatenation
    # produced a single bogus entry 'EBVERSIONOPENBLASEBROOTFFTW' and the two intended
    # variables were silently never checked
    undefined_env_vars = [
        'EBROOTOPENMPI',  # no prefix in metadata
        'EBVERSIONOPENBLAS',  # no version in metadata
        'EBROOTFFTW', 'EBVERSIONFFTW',  # no name in metadata
        'EBROOTSCALAPACK', 'EBVERSIONSCALAPACK',  # no metadata
    ]
    for env_var in undefined_env_vars:
        self.assertTrue(os.getenv(env_var) is None)

    # make sure no unexpected $EBROOT* or $EBVERSION* environment variables were defined
    # NOTE(review): variable names below don't obviously match software name 'test-toolchain' — confirm
    del os.environ['EBROOTTESTMINTOOLCHAIN']
    del os.environ['EBVERSIONTESTMINTOOLCHAIN']
    extra_eb_env_vars = []
    for key in os.environ:
        if any(key.startswith(x) for x in ['EBROOT', 'EBVERSION']):
            extra_eb_env_vars.append(key)
    self.assertEqual(extra_eb_env_vars, [])
def main(testing_data=(None, None, None)):
    """
    Main function:
    @arg testing_data: a tuple (args, logfile, do_build) used to steer behavior when testing main

    This function will:
    - read easyconfig
    - build software
    """
    # purposely obtain session state very early, to avoid modules loaded by EasyBuild meddling in
    init_session_state = session_state()

    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # record full configuration (incl. defaults) in session state
    eb_config = eb_go.generate_cmd_line(add_default=True)
    init_session_state.update({'easybuild_configuration': eb_config})

    # set umask (as early as possible)
    if options.umask is not None:
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)

        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths for installed easyconfigs, so we can later locate specified easyconfigs
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    # specified robot paths are preferred over installed easyconfig files
    # --try-X and --dep-graph both require --robot, so enable it with path of installed easyconfigs
    if robot_path or try_to_generate or options.dep_graph:
        if robot_path is None:
            robot_path = []
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    config.init_build_options({
        'aggregate_regtest': options.aggregate_regtest,
        'allow_modules_tool_mismatch': options.allow_modules_tool_mismatch,
        'check_osdeps': not options.ignore_osdeps,
        'cleanup_builddir': options.cleanup_builddir,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'github_user': options.github_user,
        'group': options.group,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'set_gid_bit': options.set_gid_bit,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'sticky_bit': options.sticky_bit,
        'stop': options.stop,
        'umask': options.umask,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        'validate': not options.force,
    })

    # obtain list of loaded modules, build options must be initialized first
    modlist = session_module_list()
    init_session_state.update({'module_list': modlist})
    _log.debug("Initial session state: %s" % init_session_state)

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        ignore_dirs = config.build_option('ignore_dirs')
        silent = config.build_option('silent')
        search_file(search_path, query, short=not options.search, ignore_dirs=ignore_dirs, silent=silent)

    # determine the list of (path, generated) tuples of easyconfig files to process
    paths = []
    if len(orig_paths) == 0:
        if options.from_pr:
            # fetch easyconfigs from a GitHub pull request into a dedicated subdir of the temp dir
            pr_path = os.path.join(eb_tmpdir, "files_pr%s" % options.from_pr)
            pr_files = fetch_easyconfigs_from_pr(options.from_pr, path=pr_path, github_user=options.github_user)
            paths = [(path, False) for path in pr_files if path.endswith('.eb')]
        elif 'name' in build_specs:
            # try to obtain or generate an easyconfig file based on the software build specifications
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                # a bare filename that doesn't exist locally must be searched for
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if not d in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:
            # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            ec_files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for ec_file in ec_files:
                # only pass build specs when not generating easyconfig files
                if try_to_generate:
                    ecs = process_easyconfig(ec_file)
                else:
                    ecs = process_easyconfig(ec_file, build_specs=build_specs)
                easyconfigs.extend(ecs)
        except IOError, err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
def resolve_dependencies(unprocessed, build_specs=None, retain_all_deps=False):
    """
    Work through the list of easyconfigs to determine an optimal order
    @param unprocessed: list of easyconfigs
    @param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    @param retain_all_deps: boolean indicating whether all dependencies must be retained, regardless of
                            availability of modules; the 'retain_all_deps' build option is OR-ed in below
    @return: list of easyconfigs ('ordered_ecs'), ordered such that dependencies come before dependents
    """
    robot = build_option('robot_path')
    # named argument is OR-ed with the corresponding build option
    retain_all_deps = build_option('retain_all_deps') or retain_all_deps

    if retain_all_deps:
        # assume that no modules are available when forced, to retain all dependencies
        avail_modules = []
        _log.info("Forcing all dependencies to be retained.")
    else:
        # Get a list of all available modules (format: [(name, installversion), ...])
        avail_modules = modules_tool().available()

        if len(avail_modules) == 0:
            _log.warning("No installed modules. Your MODULEPATH is probably incomplete: %s" % os.getenv('MODULEPATH'))

    ordered_ecs = []
    # all available modules can be used for resolving dependencies except those that will be installed
    being_installed = [p['module'] for p in unprocessed]
    avail_modules = [m for m in avail_modules if not m in being_installed]

    _log.debug('unprocessed before resolving deps: %s' % unprocessed)

    # resolve all dependencies, put a safeguard in place to avoid an infinite loop (shouldn't occur though)
    irresolvable = []
    loopcnt = 0
    maxloopcnt = 10000
    while unprocessed:
        # make sure this stops, we really don't want to get stuck in an infinite loop
        loopcnt += 1
        if loopcnt > maxloopcnt:
            tup = (maxloopcnt, unprocessed, irresolvable)
            msg = "Maximum loop cnt %s reached, so quitting (unprocessed: %s, irresolvable: %s)" % tup
            # NOTE(review): presumably _log.error() raises/aborts here (old EasyBuild logger style),
            # otherwise the loop would keep spinning — confirm against the logger implementation
            _log.error(msg)

        # first try resolving dependencies without using external dependencies
        # keep iterating as long as the previous pass resolved at least one additional module
        last_processed_count = -1
        while len(avail_modules) > last_processed_count:
            last_processed_count = len(avail_modules)
            more_ecs, unprocessed, avail_modules = find_resolved_modules(unprocessed, avail_modules)
            for ec in more_ecs:
                # avoid duplicates in the resulting ordered list (compare by module name)
                if not ec['module'] in [x['module'] for x in ordered_ecs]:
                    ordered_ecs.append(ec)

        # robot: look for existing dependencies, add them
        if robot and unprocessed:
            being_installed = [det_full_module_name(p['ec'], eb_ns=True) for p in unprocessed]

            additional = []
            for i, entry in enumerate(unprocessed):
                # do not choose an entry that is being installed in the current run
                # if they depend, you probably want to rebuild them using the new dependency
                deps = entry['dependencies']
                candidates = [d for d in deps if not det_full_module_name(d, eb_ns=True) in being_installed]
                if len(candidates) > 0:
                    # only the first candidate dependency is handled per outer-loop iteration
                    cand_dep = candidates[0]
                    # find easyconfig, might not find any
                    _log.debug("Looking for easyconfig for %s" % str(cand_dep))
                    # note: robot_find_easyconfig may return None
                    path = robot_find_easyconfig(robot, cand_dep['name'], det_full_ec_version(cand_dep))

                    if path is None:
                        # no easyconfig found for dependency, add to list of irresolvable dependencies
                        if cand_dep not in irresolvable:
                            _log.debug("Irresolvable dependency found: %s" % cand_dep)
                            irresolvable.append(cand_dep)
                        # remove irresolvable dependency from list of dependencies so we can continue
                        # (mutates the entry in-place)
                        entry['dependencies'].remove(cand_dep)
                    else:
                        _log.info("Robot: resolving dependency %s with %s" % (cand_dep, path))
                        # build specs should not be passed down to resolved dependencies,
                        # to avoid that e.g. --try-toolchain trickles down into the used toolchain itself
                        processed_ecs = process_easyconfig(path, validate=not retain_all_deps)

                        # ensure that selected easyconfig provides required dependency
                        mods = [det_full_module_name(spec['ec']) for spec in processed_ecs]
                        dep_mod_name = det_full_module_name(cand_dep)
                        if not dep_mod_name in mods:
                            tup = (path, dep_mod_name, mods)
                            _log.error("easyconfig file %s does not contain module %s (mods: %s)" % tup)

                        for ec in processed_ecs:
                            if not ec in unprocessed + additional:
                                additional.append(ec)
                                _log.debug("Added %s as dependency of %s" % (ec, entry))
                else:
                    mod_name = det_full_module_name(entry['ec'], eb_ns=True)
                    _log.debug("No more candidate dependencies to resolve for %s" % mod_name)

            # add additional (new) easyconfigs to list of stuff to process
            unprocessed.extend(additional)

        elif not robot:
            # no use in continuing if robot is not enabled, dependencies won't be resolved anyway
            irresolvable = [dep for x in unprocessed for dep in x['dependencies']]
            break

    if irresolvable:
        irresolvable_mod_deps = [(det_full_module_name(dep, eb_ns=True), dep) for dep in irresolvable]
        _log.error('Irresolvable dependencies encountered: %s' % irresolvable_mod_deps)

    _log.info("Dependency resolution complete, building as follows:\n%s" % ordered_ecs)
    return ordered_ecs
def tweak(easyconfigs, build_specs, modtool, targetdirs=None):
    """
    Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs (dicts providing at least 'ec' and 'spec' keys)
    :param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    :param modtool: modules tool instance, passed through to dependency resolution / toolchain mapping
    :param targetdirs: optional 2-tuple of target directories: (dir for tweaked listed easyconfigs,
                       dir for tweaked dependency easyconfigs)
    :return: list of tweaked easyconfigs for the easyconfigs that were originally listed
    """
    tweaked_ecs_path, tweaked_ecs_deps_path = None, None
    if targetdirs is not None:
        tweaked_ecs_path, tweaked_ecs_deps_path = targetdirs
    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)
    # Toolchain is unique, let's store it
    source_toolchain = easyconfigs[-1]['ec']['toolchain']
    modifying_toolchains = False
    target_toolchain = {}
    src_to_dst_tc_mapping = {}
    # 'regex mode' == plain per-file tweaking via tweak_one, without recursing through the dep graph
    revert_to_regex = False

    if 'toolchain_name' in build_specs or 'toolchain_version' in build_specs:
        keys = build_specs.keys()

        # Make sure there are no more build_specs, as combining --try-toolchain* with other options is currently not
        # supported
        if any(key not in ['toolchain_name', 'toolchain_version', 'toolchain'] for key in keys):
            print_warning("Combining --try-toolchain* with other build options is not fully supported: using regex")
            revert_to_regex = True

        if not revert_to_regex:
            # we're doing something with the toolchain,
            # so build specifications should be applied to whole dependency graph;
            # obtain full dependency graph for specified easyconfigs;
            # easyconfigs will be ordered 'top-to-bottom' (toolchains and dependencies appearing first)
            modifying_toolchains = True

            # target toolchain name/version default to the source toolchain's values when not specified
            if 'toolchain_name' in keys:
                target_toolchain['name'] = build_specs['toolchain_name']
            else:
                target_toolchain['name'] = source_toolchain['name']

            if 'toolchain_version' in keys:
                target_toolchain['version'] = build_specs['toolchain_version']
            else:
                target_toolchain['version'] = source_toolchain['version']

            if build_option('map_toolchains'):
                try:
                    src_to_dst_tc_mapping = map_toolchain_hierarchies(source_toolchain, target_toolchain, modtool)
                except EasyBuildError as err:
                    # make sure exception was raised by match_minimum_tc_specs because toolchain mapping didn't work
                    if "No possible mapping from source toolchain" in err.msg:
                        error_msg = err.msg + '\n'
                        error_msg += "Toolchain %s is not equivalent to toolchain %s in terms of capabilities. "
                        error_msg += "(If you know what you are doing, "
                        error_msg += "you can use --disable-map-toolchains to proceed anyway.)"
                        raise EasyBuildError(error_msg, target_toolchain['name'], source_toolchain['name'])
                    else:
                        # simply re-raise the exception if something else went wrong
                        raise err
            else:
                msg = "Mapping of (sub)toolchains disabled, so falling back to regex mode, "
                msg += "disabling recursion and not changing (sub)toolchains for dependencies"
                _log.info(msg)
                revert_to_regex = True
                modifying_toolchains = False

        if not revert_to_regex:
            _log.debug("Applying build specifications recursively (no software name/version found): %s", build_specs)
            orig_ecs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True)

            # Filter out the toolchain hierarchy (which would only appear if we are applying build_specs recursively)
            # We can leave any dependencies they may have as they will only be used if required (or originally listed)
            _log.debug("Filtering out toolchain hierarchy for %s", source_toolchain)

            # index-based loop since entries are deleted while iterating
            i = 0
            while i < len(orig_ecs):
                # NOTE(review): get_toolchain_hierarchy() is re-evaluated on every iteration;
                # presumably cached upstream, otherwise this is redundant work — confirm
                tc_names = [tc['name'] for tc in get_toolchain_hierarchy(source_toolchain)]
                if orig_ecs[i]['ec']['name'] in tc_names:
                    # drop elements in toolchain hierarchy
                    del orig_ecs[i]
                else:
                    i += 1
    else:
        revert_to_regex = True

    if revert_to_regex:
        # no recursion if software name/version build specification are included or we are amending something
        # in that case, do not construct full dependency graph
        orig_ecs = easyconfigs
        _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs)

    # keep track of originally listed easyconfigs (via their path)
    listed_ec_paths = [ec['spec'] for ec in easyconfigs]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_easyconfigs = []
    for orig_ec in orig_ecs:
        # Only return tweaked easyconfigs for easyconfigs which were listed originally on the command line
        # (and use the prepended path so that they are found first).
        # easyconfig files for dependencies are also generated but not included, they will be resolved via --robot
        # either from existing easyconfigs or, if that fails, from easyconfigs in the appended path
        tc_name = orig_ec['ec']['toolchain']['name']
        new_ec_file = None
        verification_build_specs = copy.copy(build_specs)
        if orig_ec['spec'] in listed_ec_paths:
            if modifying_toolchains:
                if tc_name in src_to_dst_tc_mapping:
                    new_ec_file = map_easyconfig_to_target_tc_hierarchy(orig_ec['spec'], src_to_dst_tc_mapping,
                                                                        tweaked_ecs_path)
                    # Need to update the toolchain in the build_specs to match the toolchain mapping
                    keys = verification_build_specs.keys()
                    if 'toolchain_name' in keys:
                        verification_build_specs['toolchain_name'] = src_to_dst_tc_mapping[tc_name]['name']
                    if 'toolchain_version' in keys:
                        verification_build_specs['toolchain_version'] = src_to_dst_tc_mapping[tc_name]['version']
                    if 'toolchain' in keys:
                        verification_build_specs['toolchain'] = src_to_dst_tc_mapping[tc_name]
            else:
                new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_path)

            if new_ec_file:
                new_ecs = process_easyconfig(new_ec_file, build_specs=verification_build_specs)
                tweaked_easyconfigs.extend(new_ecs)
        else:
            # Place all tweaked dependency easyconfigs in the directory appended to the robot path
            if modifying_toolchains:
                if tc_name in src_to_dst_tc_mapping:
                    new_ec_file = map_easyconfig_to_target_tc_hierarchy(orig_ec['spec'], src_to_dst_tc_mapping,
                                                                        targetdir=tweaked_ecs_deps_path)
            else:
                new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_deps_path)

    return tweaked_easyconfigs
def get_toolchain_hierarchy(parent_toolchain):
    """
    Determine list of subtoolchains for specified parent toolchain.

    The result is ordered from the most minimal subtoolchain up to (and including) the specified
    parent toolchain itself. The dummy toolchain is only considered the most minimal subtoolchain
    when the add_dummy_to_minimal_toolchains build option is enabled.

    @param parent_toolchain: dictionary with name/version of parent toolchain
    """
    # build a name -> subtoolchain-name lookup table from all known toolchain classes
    _, tc_classes = search_toolchain('')
    sub_of = {tc.NAME: getattr(tc, 'SUBTOOLCHAIN', None) for tc in tc_classes}

    tc_name, tc_version = parent_toolchain['name'], parent_toolchain['version']
    sub_name, sub_version = sub_of[tc_name], None

    # the parent toolchain sits at the top of the hierarchy
    hierarchy = [parent_toolchain]

    while sub_name:
        # locate & parse the easyconfig of the current toolchain;
        # its dependencies determine which version of the subtoolchain is used
        ec_path = robot_find_easyconfig(tc_name, tc_version)
        if ec_path is None:
            raise EasyBuildError("Could not find easyconfig for %(name)s toolchain version %(version)s",
                                 tc_name, tc_version)
        parsed_ec = process_easyconfig(ec_path)[0]

        # collect all dependencies that were built with the subtoolchain
        matching_tcs = [dep['toolchain'] for dep in parsed_ec['dependencies']
                        if dep['toolchain']['name'] == sub_name]
        versions_found = set(tc['version'] for tc in matching_tcs)

        if len(versions_found) > 1:
            raise EasyBuildError("Multiple versions of %s found in dependencies of toolchain %s: %s",
                                 sub_name, tc_name, versions_found)
        elif versions_found:
            # exactly one version found, use it
            sub_version = matching_tcs[0]['version']
        elif sub_name == GCCcore.NAME:
            # only retain GCCcore as subtoolchain if version was found
            _log.info("No version found for %s; assuming legacy toolchain and skipping it as subtoolchain.",
                      sub_name)
            sub_name = GCCcore.SUBTOOLCHAIN
            sub_version = ''
        elif sub_name == DUMMY_TOOLCHAIN_NAME:
            # dummy toolchain: end of the line
            sub_version = ''
        else:
            raise EasyBuildError("No version found for subtoolchain %s in dependencies of %s",
                                 sub_name, tc_name)

        if sub_name == DUMMY_TOOLCHAIN_NAME and not build_option('add_dummy_to_minimal_toolchains'):
            # dummy toolchain is not included unless explicitly requested: we're done
            break

        # prepend to hierarchy and descend one level
        tc_name, tc_version = sub_name, sub_version
        sub_name, sub_version = sub_of[tc_name], None
        hierarchy.insert(0, {'name': tc_name, 'version': tc_version})

    _log.info("Found toolchain hierarchy for toolchain %s: %s", parent_toolchain, hierarchy)
    return hierarchy
def main(testing_data=(None, None, None)):
    """
    Main function:
    @arg options: a tuple: (options, paths, logger, logfile, hn) as defined in parse_options
    @param testing_data: 3-tuple (args, logfile, do_build) used to steer behavior when testing;
                         testing mode is active when the first element is not None
    This function will:
    - read easyconfig
    - build software
    """
    # disallow running EasyBuild as root
    if os.getuid() == 0:
        sys.stderr.write("ERROR: You seem to be running EasyBuild with root privileges.\n"
                         "That's not wise, so let's end this here.\n"
                         "Exiting.\n")
        sys.exit(1)

    # steer behavior when testing main
    testing = testing_data[0] is not None
    args, logfile, do_build = testing_data

    # initialise options
    eb_go = eboptions.parse_options(args=args)
    options = eb_go.options
    orig_paths = eb_go.args

    # set umask (as early as possible)
    if options.umask is not None:
        # umask option value is specified in octal notation
        new_umask = int(options.umask, 8)
        old_umask = os.umask(new_umask)

    # set temporary directory to use
    eb_tmpdir = set_tmpdir(options.tmpdir)

    # initialise logging for main
    if options.logtostdout:
        fancylogger.logToScreen(enable=True, stdout=True)
    else:
        if logfile is None:
            # mkstemp returns (fd,filename), fd is from os.open, not regular open!
            fd, logfile = tempfile.mkstemp(suffix='.log', prefix='easybuild-')
            os.close(fd)

        fancylogger.logToFile(logfile)
        print_msg('temporary log file in case of crash %s' % (logfile), log=None, silent=testing)

    global _log
    _log = fancylogger.getLogger(fname=False)

    if options.umask is not None:
        _log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))

    # hello world!
    _log.info(this_is_easybuild())

    # how was EB called?
    eb_command_line = eb_go.generate_cmd_line() + eb_go.args
    _log.info("Command line: %s" % (" ".join(eb_command_line)))

    _log.info("Using %s as temporary directory" % eb_tmpdir)

    if not options.robot is None:
        if options.robot:
            _log.info("Using robot path(s): %s" % options.robot)
        else:
            # NOTE(review): presumably _log.error() aborts execution here (old EasyBuild logger style) — confirm
            _log.error("No robot paths specified, and unable to determine easybuild-easyconfigs install path.")

    # do not pass options.robot, it's not a list instance (and it shouldn't be modified)
    robot_path = None
    if options.robot:
        robot_path = list(options.robot)

    # determine easybuild-easyconfigs package install path
    easyconfigs_paths = get_paths_for("easyconfigs", robot_path=robot_path)
    # keep track of paths for install easyconfigs, so we can obtain find specified easyconfigs
    easyconfigs_pkg_full_paths = easyconfigs_paths[:]
    if not easyconfigs_paths:
        _log.warning("Failed to determine install path for easybuild-easyconfigs package.")

    # specified robot paths are preferred over installed easyconfig files
    if robot_path:
        robot_path.extend(easyconfigs_paths)
        easyconfigs_paths = robot_path[:]
        _log.info("Extended list of robot paths with paths for installed easyconfigs: %s" % robot_path)

    # initialise the easybuild configuration
    config.init(options, eb_go.get_options_by_section('config'))

    # building a dependency graph implies force, so that all dependencies are retained
    # and also skips validation of easyconfigs (e.g. checking os dependencies)
    retain_all_deps = False
    if options.dep_graph:
        _log.info("Enabling force to generate dependency graph.")
        options.force = True
        retain_all_deps = True

    config.init_build_options({
        'aggregate_regtest': options.aggregate_regtest,
        'allow_modules_tool_mismatch': options.allow_modules_tool_mismatch,
        'check_osdeps': not options.ignore_osdeps,
        'command_line': eb_command_line,
        'debug': options.debug,
        'dry_run': options.dry_run,
        'easyblock': options.easyblock,
        'experimental': options.experimental,
        'force': options.force,
        'group': options.group,
        'ignore_dirs': options.ignore_dirs,
        'modules_footer': options.modules_footer,
        'only_blocks': options.only_blocks,
        'recursive_mod_unload': options.recursive_module_unload,
        'regtest_online': options.regtest_online,
        'regtest_output_dir': options.regtest_output_dir,
        'retain_all_deps': retain_all_deps,
        'robot_path': robot_path,
        'sequential': options.sequential,
        'silent': testing,
        'set_gid_bit': options.set_gid_bit,
        'skip': options.skip,
        'skip_test_cases': options.skip_test_cases,
        'sticky_bit': options.sticky_bit,
        'stop': options.stop,
        'umask': options.umask,
        'valid_module_classes': module_classes(),
        'valid_stops': [x[0] for x in EasyBlock.get_steps()],
        # validation is skipped when forcing (see dep_graph handling above)
        'validate': not options.force,
    })

    # search for easyconfigs
    if options.search or options.search_short:
        search_path = [os.getcwd()]
        if easyconfigs_paths:
            search_path = easyconfigs_paths
        query = options.search or options.search_short
        ignore_dirs = config.build_option('ignore_dirs')
        silent = config.build_option('silent')
        search_file(search_path, query, short=not options.search, ignore_dirs=ignore_dirs, silent=silent)

    # process software build specifications (if any), i.e.
    # software name/version, toolchain name/version, extra patches, ...
    (try_to_generate, build_specs) = process_software_build_specs(options)

    paths = []
    if len(orig_paths) == 0:
        if 'name' in build_specs:
            # try to obtain or generate an easyconfig file, based on the software name in the build specs
            paths = [obtain_path(build_specs, easyconfigs_paths, try_to_generate=try_to_generate,
                                 exit_on_error=not testing)]
        elif not any([options.aggregate_regtest, options.search, options.search_short, options.regtest]):
            print_error(("Please provide one or multiple easyconfig files, or use software build "
                         "options to make EasyBuild search for easyconfigs"),
                        log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
    else:
        # look for easyconfigs with relative paths in easybuild-easyconfigs package,
        # unless they were found at the given relative paths
        if easyconfigs_pkg_full_paths:
            # determine which easyconfigs files need to be found, if any
            ecs_to_find = []
            for idx, orig_path in enumerate(orig_paths):
                # only bare filenames (no directory component) that don't exist locally need to be located
                if orig_path == os.path.basename(orig_path) and not os.path.exists(orig_path):
                    ecs_to_find.append((idx, orig_path))
            _log.debug("List of easyconfig files to find: %s" % ecs_to_find)

            # find missing easyconfigs by walking paths with installed easyconfig files
            for path in easyconfigs_pkg_full_paths:
                _log.debug("Looking for missing easyconfig files (%d left) in %s..." % (len(ecs_to_find), path))
                for (subpath, dirnames, filenames) in os.walk(path, topdown=True):
                    # iterate over a copy, since entries are removed from ecs_to_find while iterating
                    for idx, orig_path in ecs_to_find[:]:
                        if orig_path in filenames:
                            full_path = os.path.join(subpath, orig_path)
                            _log.info("Found %s in %s: %s" % (orig_path, path, full_path))
                            orig_paths[idx] = full_path
                            # if file was found, stop looking for it (first hit wins)
                            ecs_to_find.remove((idx, orig_path))

                    # stop os.walk insanity as soon as we have all we need (os.walk loop)
                    if len(ecs_to_find) == 0:
                        break

                    # ignore subdirs specified to be ignored by replacing items in dirnames list used by os.walk
                    dirnames[:] = [d for d in dirnames if not d in options.ignore_dirs]

                # stop os.walk insanity as soon as we have all we need (paths loop)
                if len(ecs_to_find) == 0:
                    break

        # indicate that specified paths do not contain generated easyconfig files
        paths = [(path, False) for path in orig_paths]

    _log.debug("Paths: %s" % paths)

    # run regtest
    if options.regtest or options.aggregate_regtest:
        _log.info("Running regression test")
        if paths:
            ec_paths = [path[0] for path in paths]
        else:  # fallback: easybuild-easyconfigs install path
            ec_paths = easyconfigs_pkg_full_paths
        regtest_ok = regtest(ec_paths)
        if not regtest_ok:
            _log.info("Regression test failed (partially)!")
            sys.exit(31)  # exit -> 3x1t -> 31

    # read easyconfig files
    easyconfigs = []
    for (path, generated) in paths:
        path = os.path.abspath(path)
        if not os.path.exists(path):
            print_error("Can't find path %s" % path)

        try:
            files = find_easyconfigs(path, ignore_dirs=options.ignore_dirs)
            for f in files:
                # tweak easyconfig files according to build specs, unless they were generated already
                if not generated and try_to_generate and build_specs:
                    ec_file = tweak(f, None, build_specs)
                else:
                    ec_file = f
                ecs = process_easyconfig(ec_file, build_specs=build_specs)
                easyconfigs.extend(ecs)
        except IOError, err:
            _log.error("Processing easyconfigs in path %s failed: %s" % (path, err))
def get_toolchain_hierarchy(parent_toolchain):
    """
    Determine list of subtoolchains for specified parent toolchain.
    Result starts with the most minimal subtoolchains first, ends with specified toolchain.

    The dummy toolchain is considered the most minimal subtoolchain only if the add_dummy_to_minimal_toolchains
    build option is enabled.

    @param parent_toolchain: dictionary with name/version of parent toolchain
    @return: list of toolchain dicts ({'name': ..., 'version': ...}), minimal subtoolchain first,
             ending with the specified parent toolchain
    """
    # obtain list of all possible subtoolchains
    _, all_tc_classes = search_toolchain('')
    subtoolchains = dict((tc_class.NAME, getattr(tc_class, 'SUBTOOLCHAIN', None)) for tc_class in all_tc_classes)

    current_tc_name, current_tc_version = parent_toolchain['name'], parent_toolchain['version']
    subtoolchain_name, subtoolchain_version = subtoolchains[current_tc_name], None

    # the parent toolchain is at the top of the hierarchy
    toolchain_hierarchy = [parent_toolchain]

    # walk down the chain of subtoolchains until there is none left (or the dummy toolchain is hit)
    while subtoolchain_name:
        # grab the easyconfig of the current toolchain and search the dependencies for a version of the subtoolchain
        path = robot_find_easyconfig(current_tc_name, current_tc_version)
        if path is None:
            raise EasyBuildError("Could not find easyconfig for %(name)s toolchain version %(version)s",
                                 current_tc_name, current_tc_version)
        # parse the easyconfig
        parsed_ec = process_easyconfig(path)[0]

        # search the dependencies for the version of the subtoolchain
        dep_tcs = [dep_toolchain['toolchain'] for dep_toolchain in parsed_ec['dependencies']
                   if dep_toolchain['toolchain']['name'] == subtoolchain_name]
        unique_dep_tc_versions = set([dep_tc['version'] for dep_tc in dep_tcs])

        if len(unique_dep_tc_versions) == 1:
            subtoolchain_version = dep_tcs[0]['version']

        elif len(unique_dep_tc_versions) == 0:
            # only retain GCCcore as subtoolchain if version was found
            if subtoolchain_name == GCCcore.NAME:
                _log.info("No version found for %s; assuming legacy toolchain and skipping it as subtoolchain.",
                          subtoolchain_name)
                # skip over GCCcore to its own subtoolchain
                subtoolchain_name = GCCcore.SUBTOOLCHAIN
                subtoolchain_version = ''
            # dummy toolchain: end of the line
            elif subtoolchain_name == DUMMY_TOOLCHAIN_NAME:
                subtoolchain_version = ''
            else:
                raise EasyBuildError("No version found for subtoolchain %s in dependencies of %s",
                                     subtoolchain_name, current_tc_name)
        else:
            # ambiguous: multiple different versions of the subtoolchain among the dependencies
            raise EasyBuildError("Multiple versions of %s found in dependencies of toolchain %s: %s",
                                 subtoolchain_name, current_tc_name, unique_dep_tc_versions)

        if subtoolchain_name == DUMMY_TOOLCHAIN_NAME and not build_option('add_dummy_to_minimal_toolchains'):
            # we're done
            break

        # add to hierarchy and move to next
        current_tc_name, current_tc_version = subtoolchain_name, subtoolchain_version
        subtoolchain_name, subtoolchain_version = subtoolchains[current_tc_name], None
        toolchain_hierarchy.insert(0, {'name': current_tc_name, 'version': current_tc_version})

    _log.info("Found toolchain hierarchy for toolchain %s: %s", parent_toolchain, toolchain_hierarchy)
    return toolchain_hierarchy
def test_resolve_dependencies_minimal(self):
    """Test resolved dependencies with minimal toolchain."""
    # replace log.experimental with log.warning to allow experimental code
    easybuild.framework.easyconfig.tools._log.experimental = easybuild.framework.easyconfig.tools._log.warning

    test_easyconfigs = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
    install_mock_module()

    init_config(build_options={
        'allow_modules_tool_mismatch': True,
        'external_modules_metadata': ConfigObj(),
        'robot_path': test_easyconfigs,
        'valid_module_classes': module_classes(),
        'validate': False,
    })

    # compose a test easyconfig file on the fly
    barec = os.path.join(self.test_prefix, 'bar-1.2.3-goolf-1.4.10.eb')
    barec_lines = [
        "easyblock = 'ConfigureMake'",
        "name = 'bar'",
        "version = '1.2.3'",
        "homepage = 'http://example.com'",
        "description = 'foo'",
        # deliberately listing components of toolchain as dependencies without specifying subtoolchains,
        # to test resolving of dependencies with minimal toolchain
        # for each of these, we know test easyconfigs are available (which are required here)
        "dependencies = [",
        " ('OpenMPI', '1.6.4'),",  # available with GCC/4.7.2
        " ('OpenBLAS', '0.2.6', '-LAPACK-3.4.2'),",  # available with gompi/1.4.10
        " ('ScaLAPACK', '2.0.2', '-OpenBLAS-0.2.6-LAPACK-3.4.2'),",  # available with gompi/1.4.10
        " ('SQLite', '3.8.10.2'),",
        "]",
        # toolchain as list line, for easy modification later
        "toolchain = {'name': 'goolf', 'version': '1.4.10'}",
    ]
    write_file(barec, '\n'.join(barec_lines))
    bar = process_easyconfig(barec)[0]

    # all modules in the dep graph, in order
    all_mods_ordered = [
        'GCC/4.7.2',
        'hwloc/1.6.2-GCC-4.7.2',
        'OpenMPI/1.6.4-GCC-4.7.2',
        'gompi/1.4.10',
        'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
        'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
        'SQLite/3.8.10.2-GCC-4.7.2',
        'FFTW/3.3.3-gompi-1.4.10',
        'goolf/1.4.10',
        'bar/1.2.3-goolf-1.4.10',
    ]

    # no modules available, so all dependencies are retained
    MockModule.avail_modules = []
    res = resolve_dependencies([bar], minimal_toolchains=True)
    self.assertEqual(len(res), 10)
    self.assertEqual([x['full_mod_name'] for x in res], all_mods_ordered)

    # cleanup
    shutil.rmtree(os.path.join(tempfile.gettempdir(), 'minimal-easyconfigs'))

    MockModule.avail_modules = [
        'GCC/4.7.2',
        'gompi/1.4.10',
        'goolf/1.4.10',
        'OpenMPI/1.6.4-GCC-4.7.2',
        'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
        'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
        'SQLite/3.8.10.2-GCC-4.7.2',
    ]

    # test resolving dependencies with minimal toolchain (rather than using goolf/1.4.10 for all of them)
    # existing modules are *not* taken into account when determining minimal subtoolchain, by default
    res = resolve_dependencies([bar], minimal_toolchains=True)
    # only 'bar' itself is left to build, since all dependency modules are available
    self.assertEqual(len(res), 1)
    self.assertEqual(res[0]['full_mod_name'], bar['ec'].full_mod_name)

    # cleanup
    shutil.rmtree(os.path.join(tempfile.gettempdir(), 'minimal-easyconfigs'))

    # test retaining all dependencies, regardless of whether modules are available or not
    res = resolve_dependencies([bar], minimal_toolchains=True, retain_all_deps=True)
    self.assertEqual(len(res), 10)
    mods = [x['full_mod_name'] for x in res]
    self.assertEqual(mods, all_mods_ordered)
    self.assertTrue('SQLite/3.8.10.2-GCC-4.7.2' in mods)

    # cleanup
    shutil.rmtree(os.path.join(tempfile.gettempdir(), 'minimal-easyconfigs'))

    # test taking into account existing modules
    # with an SQLite module with goolf/1.4.10 in place, this toolchain should be used rather than GCC/4.7.2
    MockModule.avail_modules = [
        'SQLite/3.8.10.2-goolf-1.4.10',
    ]
    res = resolve_dependencies([bar], minimal_toolchains=True, retain_all_deps=True, use_existing_modules=True)
    self.assertEqual(len(res), 10)
    mods = [x['full_mod_name'] for x in res]
    self.assertTrue('SQLite/3.8.10.2-goolf-1.4.10' in mods)
    self.assertFalse('SQLite/3.8.10.2-GCC-4.7.2' in mods)
def list_software(output_format=FORMAT_TXT, detailed=False, only_installed=False):
    """
    Show list of supported software

    :param output_format: output format to use
    :param detailed: whether or not to return detailed information (incl. version, versionsuffix, toolchain info)
    :param only_installed: only retain software for which a corresponding module is available
    :return: multi-line string presenting requested info
    """
    silent = build_option('silent')

    ec_paths = find_matching_easyconfigs('*', '*', build_option('robot_path') or [])
    total = len(ec_paths)
    parsed_ecs = []
    for idx, ec_path in enumerate(ec_paths):
        # a full EasyConfig instance is only required when the module name is needed;
        # that is significantly slower (5-10x) than a 'shallow' parse via EasyConfigParser
        if only_installed:
            ec = process_easyconfig(ec_path, validate=False, parse_only=True)[0]['ec']
        else:
            ec = EasyConfigParser(filename=ec_path).get_config_dict()
        parsed_ecs.append(ec)

        # single-line progress indicator (carriage return, no newline)
        print_msg('\r', prefix=False, newline=False, silent=silent)
        print_msg("Processed %d/%d easyconfigs..." % (idx + 1, total), newline=False, silent=silent)
    print_msg('', prefix=False, silent=silent)

    software = {}
    for ec in parsed_ecs:
        name = ec['name']
        software.setdefault(name, [])

        if is_system_toolchain(ec['toolchain']['name']):
            toolchain = SYSTEM_TOOLCHAIN_NAME
        else:
            toolchain = '%s/%s' % (ec['toolchain']['name'], ec['toolchain']['version'])

        keys = ['description', 'homepage', 'version', 'versionsuffix']
        entry = {'toolchain': toolchain}
        for key in keys:
            entry[key] = ec.get(key, '')

        # make sure values like homepage & versionsuffix get properly templated
        # (only possible for shallow-parsed easyconfigs, which are plain dicts)
        if isinstance(ec, dict):
            template_values = template_constant_dict(ec)
            for key in keys:
                if '%(' in entry[key]:
                    try:
                        entry[key] = entry[key] % template_values
                    except (KeyError, TypeError, ValueError) as err:
                        _log.debug("Ignoring failure to resolve templates: %s", err)

        if only_installed:
            entry['mod_name'] = ec.full_mod_name

        software[name].append(entry)

    print_msg("Found %d different software packages" % len(software), silent=silent)

    if only_installed:
        avail_mod_names = modules_tool().available()

        # rebuild software, only retain entries with a corresponding available module
        all_software, software = software, {}
        for key in all_software:
            for item in all_software[key]:
                if item['mod_name'] in avail_mod_names:
                    software.setdefault(key, []).append(item)

        print_msg("Retained %d installed software packages" % len(software), silent=silent)

    return generate_doc('list_software_%s' % output_format, [software, detailed])
def list_software(output_format=FORMAT_TXT, detailed=False, only_installed=False):
    """
    Show list of supported software

    :param output_format: output format to use
    :param detailed: whether or not to return detailed information (incl. version, versionsuffix, toolchain info)
    :param only_installed: only retain software for which a corresponding module is available
    :return: multi-line string presenting requested info
    """
    silent = build_option("silent")
    ec_paths = find_matching_easyconfigs("*", "*", build_option("robot_path") or [])
    ecs = []
    cnt = len(ec_paths)
    for idx, ec_path in enumerate(ec_paths):
        # full EasyConfig instance is only required when module name is needed
        # this is significantly slower (5-10x) than a 'shallow' parse via EasyConfigParser
        if only_installed:
            ec = process_easyconfig(ec_path, validate=False, parse_only=True)[0]["ec"]
        else:
            ec = EasyConfigParser(filename=ec_path).get_config_dict()

        ecs.append(ec)
        # single-line progress indicator (carriage return, no newline)
        print_msg("\r", prefix=False, newline=False, silent=silent)
        print_msg("Processed %d/%d easyconfigs..." % (idx + 1, cnt), newline=False, silent=silent)
    print_msg("", prefix=False, silent=silent)

    software = {}
    for ec in ecs:
        software.setdefault(ec["name"], [])
        if ec["toolchain"]["name"] == DUMMY_TOOLCHAIN_NAME:
            toolchain = DUMMY_TOOLCHAIN_NAME
        else:
            toolchain = "%s/%s" % (ec["toolchain"]["name"], ec["toolchain"]["version"])

        versionsuffix = ec.get("versionsuffix", "")
        # make sure versionsuffix gets properly templated
        # (only possible for shallow-parsed easyconfigs, which are plain dicts)
        if versionsuffix and isinstance(ec, dict):
            template_values = template_constant_dict(ec)
            try:
                versionsuffix = versionsuffix % template_values
            except (KeyError, TypeError, ValueError) as err:
                # bugfix: an unresolvable template (e.g. a value only available via a full parse)
                # used to raise here and abort the entire listing; keep the raw value instead
                _log.debug("Ignoring failure to resolve templates in versionsuffix '%s': %s", versionsuffix, err)

        software[ec["name"]].append(
            {
                "description": ec["description"],
                "homepage": ec["homepage"],
                "toolchain": toolchain,
                "version": ec["version"],
                "versionsuffix": versionsuffix,
            }
        )

        if only_installed:
            software[ec["name"]][-1].update({"mod_name": ec.full_mod_name})

    print_msg("Found %d different software packages" % len(software), silent=silent)

    if only_installed:
        avail_mod_names = modules_tool().available()

        # rebuild software, only retain entries with a corresponding available module
        software, all_software = {}, software
        for key in all_software:
            for entry in all_software[key]:
                if entry["mod_name"] in avail_mod_names:
                    software.setdefault(key, []).append(entry)

        print_msg("Retained %d installed software packages" % len(software), silent=silent)

    return generate_doc("list_software_%s" % output_format, [software, detailed])
def resolve_dependencies(unprocessed, build_specs=None, retain_all_deps=False):
    """
    Work through the list of easyconfigs to determine an optimal order

    @param unprocessed: list of easyconfigs
    @param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    @param retain_all_deps: boolean indicating whether all dependencies must be retained, regardless of availability;
                            retain all deps when True, check matching build option when False
    """
    robot = build_option("robot_path")

    # retain all dependencies if specified by either the resp. build option or the dedicated named argument
    retain_all_deps = build_option("retain_all_deps") or retain_all_deps

    if retain_all_deps:
        # assume that no modules are available when forced, to retain all dependencies
        avail_modules = []
        _log.info("Forcing all dependencies to be retained.")
    else:
        # Get a list of all available modules (format: [(name, installversion), ...])
        avail_modules = modules_tool().available()

        if len(avail_modules) == 0:
            _log.warning("No installed modules. Your MODULEPATH is probably incomplete: %s" % os.getenv("MODULEPATH"))

    ordered_ecs = []
    # all available modules can be used for resolving dependencies except those that will be installed
    being_installed = [p["full_mod_name"] for p in unprocessed]
    avail_modules = [m for m in avail_modules if m not in being_installed]

    _log.debug("unprocessed before resolving deps: %s" % unprocessed)

    # instantiate module naming schemes once, rather than once per dependency in the loops below
    # (presumed stateless; repeated instantiation only cost time)
    easybuild_mns = EasyBuildMNS()
    active_mns = ActiveMNS()

    # keep a set of module names already in ordered_ecs, so the duplicate check is O(1)
    # rather than rebuilding the full list of module names for every candidate easyconfig
    ordered_mod_names = set()

    # resolve all dependencies, put a safeguard in place to avoid an infinite loop (shouldn't occur though)
    irresolvable = []
    loopcnt = 0
    maxloopcnt = 10000
    while unprocessed:
        # make sure this stops, we really don't want to get stuck in an infinite loop
        loopcnt += 1
        if loopcnt > maxloopcnt:
            raise EasyBuildError(
                "Maximum loop cnt %s reached, so quitting (unprocessed: %s, irresolvable: %s)",
                maxloopcnt, unprocessed, irresolvable,
            )

        # first try resolving dependencies without using external dependencies
        last_processed_count = -1
        while len(avail_modules) > last_processed_count:
            last_processed_count = len(avail_modules)
            res = find_resolved_modules(unprocessed, avail_modules, retain_all_deps=retain_all_deps)
            more_ecs, unprocessed, avail_modules = res
            for ec in more_ecs:
                # only add easyconfig if it's not included yet (based on module name)
                if ec["full_mod_name"] not in ordered_mod_names:
                    ordered_ecs.append(ec)
                    ordered_mod_names.add(ec["full_mod_name"])

        # dependencies marked as external modules should be resolved via available modules at this point
        missing_external_modules = [
            d["full_mod_name"] for ec in unprocessed for d in ec["dependencies"] if d.get("external_module", False)
        ]
        if missing_external_modules:
            raise EasyBuildError(
                "Missing modules for one or more dependencies marked as external modules: %s", missing_external_modules
            )

        # robot: look for existing dependencies, add them
        if robot and unprocessed:
            # rely on EasyBuild module naming scheme when resolving dependencies, since we know that will
            # generate sensible module names that include the necessary information for the resolution to work
            # (name, version, toolchain, versionsuffix)
            being_installed = [easybuild_mns.det_full_module_name(p["ec"]) for p in unprocessed]

            additional = []
            for entry in unprocessed:
                # do not choose an entry that is being installed in the current run
                # if they depend, you probably want to rebuild them using the new dependency
                deps = entry["dependencies"]
                candidates = [d for d in deps if easybuild_mns.det_full_module_name(d) not in being_installed]
                if candidates:
                    cand_dep = candidates[0]
                    # find easyconfig, might not find any
                    _log.debug("Looking for easyconfig for %s" % str(cand_dep))
                    # note: robot_find_easyconfig may return None
                    path = robot_find_easyconfig(cand_dep["name"], det_full_ec_version(cand_dep))

                    if path is None:
                        # no easyconfig found for dependency, add to list of irresolvable dependencies
                        if cand_dep not in irresolvable:
                            _log.debug("Irresolvable dependency found: %s" % cand_dep)
                            irresolvable.append(cand_dep)
                        # remove irresolvable dependency from list of dependencies so we can continue
                        entry["dependencies"].remove(cand_dep)
                    else:
                        _log.info("Robot: resolving dependency %s with %s" % (cand_dep, path))
                        # build specs should not be passed down to resolved dependencies,
                        # to avoid that e.g. --try-toolchain trickles down into the used toolchain itself
                        hidden = cand_dep.get("hidden", False)
                        processed_ecs = process_easyconfig(path, validate=not retain_all_deps, hidden=hidden)

                        # ensure that selected easyconfig provides required dependency
                        mods = [spec["ec"].full_mod_name for spec in processed_ecs]
                        dep_mod_name = active_mns.det_full_module_name(cand_dep)
                        if dep_mod_name not in mods:
                            raise EasyBuildError(
                                "easyconfig file %s does not contain module %s (mods: %s)", path, dep_mod_name, mods
                            )

                        for ec in processed_ecs:
                            # avoid building a throwaway concatenated list for every membership test
                            if ec not in unprocessed and ec not in additional:
                                additional.append(ec)
                                _log.debug("Added %s as dependency of %s" % (ec, entry))
                else:
                    mod_name = easybuild_mns.det_full_module_name(entry["ec"])
                    _log.debug("No more candidate dependencies to resolve for %s" % mod_name)

            # add additional (new) easyconfigs to list of stuff to process
            unprocessed.extend(additional)
            _log.debug("Unprocessed dependencies: %s", unprocessed)

        elif not robot:
            # no use in continuing if robot is not enabled, dependencies won't be resolved anyway
            irresolvable = [dep for x in unprocessed for dep in x["dependencies"]]
            break

    if irresolvable:
        _log.warning("Irresolvable dependencies (details): %s" % irresolvable)
        irresolvable_mods_eb = [easybuild_mns.det_full_module_name(dep) for dep in irresolvable]
        _log.warning("Irresolvable dependencies (EasyBuild module names): %s" % ", ".join(irresolvable_mods_eb))
        irresolvable_mods = [active_mns.det_full_module_name(dep) for dep in irresolvable]
        raise EasyBuildError("Irresolvable dependencies encountered: %s", ", ".join(irresolvable_mods))

    _log.info("Dependency resolution complete, building as follows: %s" % ordered_ecs)
    return ordered_ecs
def tweak(easyconfigs, build_specs, modtool, targetdirs=None):
    """
    Tweak list of easyconfigs according to provided build specifications.

    :param easyconfigs: list of parsed easyconfigs to tweak
    :param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    :param modtool: ModulesTool instance to use
    :param targetdirs: 2-tuple of target directories: one for tweaked versions of the listed easyconfigs,
                       one for tweaked easyconfigs of their dependencies (None => no target dirs)
    """
    tweaked_ecs_path, tweaked_ecs_deps_path = None, None
    if targetdirs is not None:
        tweaked_ecs_path, tweaked_ecs_deps_path = targetdirs

    # make sure easyconfigs all feature the same toolchain (otherwise we *will* run into trouble)
    toolchains = nub(['%(name)s/%(version)s' % ec['ec']['toolchain'] for ec in easyconfigs])
    if len(toolchains) > 1:
        raise EasyBuildError("Multiple toolchains featured in easyconfigs, --try-X not supported in that case: %s",
                             toolchains)
    # Toolchain is unique, let's store it
    source_toolchain = easyconfigs[-1]['ec']['toolchain']
    modifying_toolchains = False
    target_toolchain = {}
    src_to_dst_tc_mapping = {}
    revert_to_regex = False

    if 'toolchain_name' in build_specs or 'toolchain_version' in build_specs:
        keys = build_specs.keys()

        # Make sure there are no more build_specs, as combining --try-toolchain* with other options is currently not
        # supported
        if any(key not in ['toolchain_name', 'toolchain_version', 'toolchain'] for key in keys):
            warning_msg = "Combining --try-toolchain* with other build options is not fully supported: using regex"
            print_warning(warning_msg, silent=build_option('silent'))
            revert_to_regex = True

        if not revert_to_regex:
            # we're doing something with the toolchain,
            # so build specifications should be applied to whole dependency graph;
            # obtain full dependency graph for specified easyconfigs;
            # easyconfigs will be ordered 'top-to-bottom' (toolchains and dependencies appearing first)
            modifying_toolchains = True

            if 'toolchain_name' in keys:
                target_toolchain['name'] = build_specs['toolchain_name']
            else:
                target_toolchain['name'] = source_toolchain['name']

            if 'toolchain_version' in keys:
                target_toolchain['version'] = build_specs['toolchain_version']
            else:
                target_toolchain['version'] = source_toolchain['version']

            if build_option('map_toolchains'):
                try:
                    src_to_dst_tc_mapping = map_toolchain_hierarchies(source_toolchain, target_toolchain, modtool)
                except EasyBuildError as err:
                    # make sure exception was raised by match_minimum_tc_specs because toolchain mapping didn't work
                    if "No possible mapping from source toolchain" in err.msg:
                        error_msg = err.msg + '\n'
                        error_msg += "Toolchain %s is not equivalent to toolchain %s in terms of capabilities. "
                        error_msg += "(If you know what you are doing, "
                        error_msg += "you can use --disable-map-toolchains to proceed anyway.)"
                        raise EasyBuildError(error_msg, target_toolchain['name'], source_toolchain['name'])
                    else:
                        # simply re-raise the exception if something else went wrong
                        # (bare raise keeps the original traceback intact)
                        raise
            else:
                msg = "Mapping of (sub)toolchains disabled, so falling back to regex mode, "
                msg += "disabling recursion and not changing (sub)toolchains for dependencies"
                _log.info(msg)
                revert_to_regex = True
                modifying_toolchains = False

        if not revert_to_regex:
            _log.debug("Applying build specifications recursively (no software name/version found): %s", build_specs)
            orig_ecs = resolve_dependencies(easyconfigs, modtool, retain_all_deps=True)

            # Filter out the toolchain hierarchy (which would only appear if we are applying build_specs recursively)
            # We can leave any dependencies they may have as they will only be used if required (or originally listed)
            _log.debug("Filtering out toolchain hierarchy for %s", source_toolchain)
            # determining the toolchain hierarchy requires locating & parsing easyconfigs, and does not change while
            # filtering, so compute it once (it was previously recomputed on every loop iteration)
            tc_names = [tc['name'] for tc in get_toolchain_hierarchy(source_toolchain)]
            # drop elements in toolchain hierarchy
            orig_ecs = [ec for ec in orig_ecs if ec['ec']['name'] not in tc_names]
    else:
        revert_to_regex = True

    if revert_to_regex:
        # no recursion if software name/version build specification are included or we are amending something
        # in that case, do not construct full dependency graph
        orig_ecs = easyconfigs
        _log.debug("Software name/version found, so not applying build specifications recursively: %s" % build_specs)

    # keep track of originally listed easyconfigs (via their path)
    listed_ec_paths = [ec['spec'] for ec in easyconfigs]

    # generate tweaked easyconfigs, and continue with those instead
    tweaked_easyconfigs = []
    for orig_ec in orig_ecs:
        # Only return tweaked easyconfigs for easyconfigs which were listed originally on the command line
        # (and use the prepended path so that they are found first).
        # easyconfig files for dependencies are also generated but not included, they will be resolved via --robot
        # either from existing easyconfigs or, if that fails, from easyconfigs in the appended path
        tc_name = orig_ec['ec']['toolchain']['name']

        new_ec_file = None
        verification_build_specs = copy.copy(build_specs)
        if orig_ec['spec'] in listed_ec_paths:
            if modifying_toolchains:
                if tc_name in src_to_dst_tc_mapping:
                    new_ec_file = map_easyconfig_to_target_tc_hierarchy(orig_ec['spec'], src_to_dst_tc_mapping,
                                                                        tweaked_ecs_path)
                    # Need to update the toolchain in the build_specs to match the toolchain mapping
                    keys = verification_build_specs.keys()
                    if 'toolchain_name' in keys:
                        verification_build_specs['toolchain_name'] = src_to_dst_tc_mapping[tc_name]['name']
                    if 'toolchain_version' in keys:
                        verification_build_specs['toolchain_version'] = src_to_dst_tc_mapping[tc_name]['version']
                    if 'toolchain' in keys:
                        verification_build_specs['toolchain'] = src_to_dst_tc_mapping[tc_name]
            else:
                new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_path)

            if new_ec_file:
                new_ecs = process_easyconfig(new_ec_file, build_specs=verification_build_specs)
                tweaked_easyconfigs.extend(new_ecs)
        else:
            # Place all tweaked dependency easyconfigs in the directory appended to the robot path
            if modifying_toolchains:
                if tc_name in src_to_dst_tc_mapping:
                    new_ec_file = map_easyconfig_to_target_tc_hierarchy(orig_ec['spec'], src_to_dst_tc_mapping,
                                                                        targetdir=tweaked_ecs_deps_path)
            else:
                new_ec_file = tweak_one(orig_ec['spec'], None, build_specs, targetdir=tweaked_ecs_deps_path)

    return tweaked_easyconfigs
def get_toolchain_hierarchy(parent_toolchain):
    """
    Determine list of subtoolchains for specified parent toolchain.
    Result starts with the most minimal subtoolchains first, ends with specified toolchain.

    The dummy toolchain is considered the most minimal subtoolchain only if the add_dummy_to_minimal_toolchains
    build option is enabled.

    @param parent_toolchain: dictionary with name/version of parent toolchain
    """
    # obtain list of all possible subtoolchains
    _, all_tc_classes = search_toolchain("")
    # map each toolchain name to the name of its (declared) subtoolchain, or None if it has none
    subtoolchains = dict((tc_class.NAME, getattr(tc_class, "SUBTOOLCHAIN", None)) for tc_class in all_tc_classes)

    current_tc_name, current_tc_version = parent_toolchain["name"], parent_toolchain["version"]
    # version of the subtoolchain is unknown until the current toolchain's easyconfig has been inspected
    subtoolchain_name, subtoolchain_version = subtoolchains[current_tc_name], None

    # the parent toolchain is at the top of the hierarchy
    toolchain_hierarchy = [parent_toolchain]

    # walk down the subtoolchain chain until a toolchain without subtoolchain is reached
    while subtoolchain_name:
        # grab the easyconfig of the current toolchain and search the dependencies for a version of the subtoolchain
        path = robot_find_easyconfig(current_tc_name, current_tc_version)
        if path is None:
            raise EasyBuildError(
                "Could not find easyconfig for %(name)s toolchain version %(version)s",
                current_tc_name,
                current_tc_version,
            )

        # parse the easyconfig
        parsed_ec = process_easyconfig(path)[0]

        # search the dependencies for the version of the subtoolchain
        dep_tcs = [
            dep_toolchain["toolchain"]
            for dep_toolchain in parsed_ec["dependencies"]
            if dep_toolchain["toolchain"]["name"] == subtoolchain_name
        ]
        unique_dep_tc_versions = set([dep_tc["version"] for dep_tc in dep_tcs])

        if len(unique_dep_tc_versions) == 1:
            # all dependencies agree on a single subtoolchain version
            subtoolchain_version = dep_tcs[0]["version"]

        elif len(unique_dep_tc_versions) == 0:
            if subtoolchain_name == DUMMY_TOOLCHAIN_NAME:
                # dummy toolchain has no version of its own
                subtoolchain_version = ""
            else:
                raise EasyBuildError(
                    "No version found for subtoolchain %s in dependencies of %s", subtoolchain_name, current_tc_name
                )
        else:
            # conflicting subtoolchain versions across dependencies: ambiguous, so give up
            raise EasyBuildError(
                "Multiple versions of %s found in dependencies of toolchain %s: %s",
                subtoolchain_name,
                current_tc_name,
                unique_dep_tc_versions,
            )

        if subtoolchain_name == DUMMY_TOOLCHAIN_NAME and not build_option("add_dummy_to_minimal_toolchains"):
            # we're done
            break

        # add to hierarchy and move to next
        current_tc_name, current_tc_version = subtoolchain_name, subtoolchain_version
        subtoolchain_name, subtoolchain_version = subtoolchains[current_tc_name], None
        # subtoolchains are prepended, so the result ends up ordered most-minimal-first
        toolchain_hierarchy.insert(0, {"name": current_tc_name, "version": current_tc_version})

    _log.info("Found toolchain hierarchy for toolchain %s: %s", parent_toolchain, toolchain_hierarchy)
    return toolchain_hierarchy
def resolve_dependencies(easyconfigs, modtool, retain_all_deps=False, raise_error_missing_ecs=True):
    """
    Work through the list of easyconfigs to determine an optimal order

    :param easyconfigs: list of easyconfigs
    :param modtool: ModulesTool instance to use
    :param retain_all_deps: boolean indicating whether all dependencies must be retained, regardless of availability;
                            retain all deps when True, check matching build option when False
    :param raise_error_missing_ecs: raise an error when one or more easyconfig files could not be found
    :return: list of easyconfigs, ordered such that every entry comes after its resolved dependencies
    """
    robot = build_option('robot_path')
    # retain all dependencies if specified by either the resp. build option or the dedicated named argument
    retain_all_deps = build_option('retain_all_deps') or retain_all_deps

    avail_modules = modtool.available()
    if retain_all_deps:
        # assume that no modules are available when forced, to retain all dependencies
        avail_modules = []
        _log.info("Forcing all dependencies to be retained.")
    else:
        if len(avail_modules) == 0:
            _log.warning("No installed modules. Your MODULEPATH is probably incomplete: %s" % os.getenv('MODULEPATH'))

    ordered_ecs = []
    # all available modules can be used for resolving dependencies except those that will be installed
    being_installed = [p['full_mod_name'] for p in easyconfigs]
    avail_modules = [m for m in avail_modules if m not in being_installed]

    _log.debug('easyconfigs before resolving deps: %s', easyconfigs)

    # totally_missing: deps with neither an easyconfig nor an existing module (always fatal);
    # missing_easyconfigs: deps with a module but no easyconfig (fatal only if raise_error_missing_ecs)
    totally_missing, missing_easyconfigs = [], []

    # resolve all dependencies, put a safeguard in place to avoid an infinite loop (shouldn't occur though)
    loopcnt = 0
    maxloopcnt = 10000
    while easyconfigs:
        # make sure this stops, we really don't want to get stuck in an infinite loop
        loopcnt += 1
        if loopcnt > maxloopcnt:
            raise EasyBuildError("Maximum loop cnt %s reached, so quitting (easyconfigs: %s, missing_easyconfigs: %s)",
                                 maxloopcnt, easyconfigs, missing_easyconfigs)

        # first try resolving dependencies without using external dependencies
        # keep iterating until no more modules are resolved in a full pass (fixed point)
        last_processed_count = -1
        while len(avail_modules) > last_processed_count:
            last_processed_count = len(avail_modules)
            res = find_resolved_modules(easyconfigs, avail_modules, modtool, retain_all_deps=retain_all_deps)
            resolved_ecs, easyconfigs, avail_modules = res
            ordered_ec_mod_names = [x['full_mod_name'] for x in ordered_ecs]
            for ec in resolved_ecs:
                # only add easyconfig if it's not included yet (based on module name)
                if not ec['full_mod_name'] in ordered_ec_mod_names:
                    ordered_ecs.append(ec)

        # dependencies marked as external modules should be resolved via available modules at this point
        missing_external_modules = [d['full_mod_name'] for ec in easyconfigs for d in ec['dependencies']
                                    if d.get('external_module', False)]
        if missing_external_modules:
            raise EasyBuildError("Missing modules for dependencies marked as external modules: %s",
                                 ', '.join(missing_external_modules))

        # robot: look for existing dependencies, add them
        if robot and easyconfigs:
            # rely on EasyBuild module naming scheme when resolving dependencies, since we know that will
            # generate sensible module names that include the necessary information for the resolution to work
            # (name, version, toolchain, versionsuffix)
            being_installed = [EasyBuildMNS().det_full_module_name(p['ec']) for p in easyconfigs]

            additional = []
            for entry in easyconfigs:
                # do not choose an entry that is being installed in the current run
                # if they depend, you probably want to rebuild them using the new dependency
                deps = entry['dependencies']
                candidates = [d for d in deps if not EasyBuildMNS().det_full_module_name(d) in being_installed]
                if candidates:
                    # only the first candidate is handled per outer-loop iteration;
                    # remaining candidates are picked up on subsequent passes of the while loop
                    cand_dep = candidates[0]
                    # find easyconfig, might not find any
                    _log.debug("Looking for easyconfig for %s" % str(cand_dep))
                    # note: robot_find_easyconfig may return None
                    path = robot_find_easyconfig(cand_dep['name'], det_full_ec_version(cand_dep))

                    if path is None:
                        full_mod_name = ActiveMNS().det_full_module_name(cand_dep)

                        # no easyconfig found + no module available => missing dependency
                        if not modtool.exist([full_mod_name])[0]:
                            if cand_dep not in totally_missing:
                                totally_missing.append(cand_dep)

                        # no easyconfig found for dependency, but module is available
                        # => add to list of missing easyconfigs
                        elif cand_dep not in missing_easyconfigs:
                            _log.debug("Irresolvable dependency found (no easyconfig file): %s", cand_dep)
                            missing_easyconfigs.append(cand_dep)

                        # remove irresolvable dependency from list of dependencies so we can continue
                        entry['dependencies'].remove(cand_dep)

                        # add dummy entry for this dependency, so --dry-run for example can still report the dep
                        additional.append({
                            'dependencies': [],
                            'ec': None,
                            'full_mod_name': full_mod_name,
                            'spec': None,
                        })
                    else:
                        _log.info("Robot: resolving dependency %s with %s" % (cand_dep, path))
                        # build specs should not be passed down to resolved dependencies,
                        # to avoid that e.g. --try-toolchain trickles down into the used toolchain itself
                        hidden = cand_dep.get('hidden', False)
                        processed_ecs = process_easyconfig(path, validate=not retain_all_deps, hidden=hidden)

                        # ensure that selected easyconfig provides required dependency
                        verify_easyconfig_filename(path, cand_dep, parsed_ec=processed_ecs)

                        for ec in processed_ecs:
                            if ec not in easyconfigs + additional:
                                additional.append(ec)
                                _log.debug("Added %s as dependency of %s" % (ec, entry))
                else:
                    mod_name = EasyBuildMNS().det_full_module_name(entry['ec'])
                    _log.debug("No more candidate dependencies to resolve for %s" % mod_name)

            # add additional (new) easyconfigs to list of stuff to process
            easyconfigs.extend(additional)
            _log.debug("Unprocessed dependencies: %s", easyconfigs)

        elif not robot:
            # no use in continuing if robot is not enabled, dependencies won't be resolved anyway
            missing_deps = [dep for x in easyconfigs for dep in x['dependencies']]
            if missing_deps:
                raise_error_missing_deps(missing_deps, extra_msg="enable dependency resolution via --robot?")

    if totally_missing:
        raise_error_missing_deps(totally_missing, extra_msg="no easyconfig file or existing module found")

    if missing_easyconfigs:
        if raise_error_missing_ecs:
            raise_error_missing_deps(missing_easyconfigs, extra_msg="no easyconfig file found in robot search path")
        else:
            _log.warning("No easyconfig files found for: %s", missing_easyconfigs)

    _log.info("Dependency resolution complete, building as follows: %s", ordered_ecs)
    return ordered_ecs
def resolve_dependencies(unprocessed, build_specs=None, retain_all_deps=False):
    """
    Work through the list of easyconfigs to determine an optimal order

    @param unprocessed: list of easyconfigs
    @param build_specs: dictionary specifying build specifications (e.g. version, toolchain, ...)
    @param retain_all_deps: boolean indicating whether all dependencies must be retained, regardless of availability;
                            retain all deps when True, check matching build option when False
    """
    robot = build_option('robot_path')

    # retain all dependencies if specified by either the resp. build option or the dedicated named argument
    retain_all_deps = build_option('retain_all_deps') or retain_all_deps

    if retain_all_deps:
        # assume that no modules are available when forced, to retain all dependencies
        avail_modules = []
        _log.info("Forcing all dependencies to be retained.")
    else:
        # Get a list of all available modules (format: [(name, installversion), ...])
        avail_modules = modules_tool().available()

        if len(avail_modules) == 0:
            _log.warning("No installed modules. Your MODULEPATH is probably incomplete: %s" % os.getenv('MODULEPATH'))

    ordered_ecs = []
    # all available modules can be used for resolving dependencies except those that will be installed
    being_installed = [p['full_mod_name'] for p in unprocessed]
    avail_modules = [m for m in avail_modules if m not in being_installed]

    _log.debug('unprocessed before resolving deps: %s' % unprocessed)

    # instantiate module naming schemes once rather than once per dependency in the loops below
    easybuild_mns = EasyBuildMNS()
    active_mns = ActiveMNS()

    # set of module names already present in ordered_ecs, so the duplicate check below is O(1)
    ordered_mod_names = set()

    # resolve all dependencies, put a safeguard in place to avoid an infinite loop (shouldn't occur though)
    irresolvable = []
    loopcnt = 0
    maxloopcnt = 10000
    while unprocessed:
        # make sure this stops, we really don't want to get stuck in an infinite loop;
        # raising (rather than just logging an error) actually terminates the loop
        loopcnt += 1
        if loopcnt > maxloopcnt:
            raise EasyBuildError("Maximum loop cnt %s reached, so quitting (unprocessed: %s, irresolvable: %s)",
                                 maxloopcnt, unprocessed, irresolvable)

        # first try resolving dependencies without using external dependencies
        last_processed_count = -1
        while len(avail_modules) > last_processed_count:
            last_processed_count = len(avail_modules)
            res = find_resolved_modules(unprocessed, avail_modules, retain_all_deps=retain_all_deps)
            more_ecs, unprocessed, avail_modules = res
            for ec in more_ecs:
                # only add easyconfig if it's not included yet (based on module name)
                if ec['full_mod_name'] not in ordered_mod_names:
                    ordered_ecs.append(ec)
                    ordered_mod_names.add(ec['full_mod_name'])

        # robot: look for existing dependencies, add them
        if robot and unprocessed:
            # rely on EasyBuild module naming scheme when resolving dependencies, since we know that will
            # generate sensible module names that include the necessary information for the resolution to work
            # (name, version, toolchain, versionsuffix)
            being_installed = [easybuild_mns.det_full_module_name(p['ec']) for p in unprocessed]

            additional = []
            for entry in unprocessed:
                # do not choose an entry that is being installed in the current run
                # if they depend, you probably want to rebuild them using the new dependency
                deps = entry['dependencies']
                candidates = [d for d in deps if easybuild_mns.det_full_module_name(d) not in being_installed]
                if candidates:
                    cand_dep = candidates[0]
                    # find easyconfig, might not find any
                    _log.debug("Looking for easyconfig for %s" % str(cand_dep))
                    # note: robot_find_easyconfig may return None
                    path = robot_find_easyconfig(cand_dep['name'], det_full_ec_version(cand_dep))

                    if path is None:
                        # no easyconfig found for dependency, add to list of irresolvable dependencies
                        if cand_dep not in irresolvable:
                            _log.debug("Irresolvable dependency found: %s" % cand_dep)
                            irresolvable.append(cand_dep)
                        # remove irresolvable dependency from list of dependencies so we can continue
                        entry['dependencies'].remove(cand_dep)
                    else:
                        _log.info("Robot: resolving dependency %s with %s" % (cand_dep, path))
                        # build specs should not be passed down to resolved dependencies,
                        # to avoid that e.g. --try-toolchain trickles down into the used toolchain itself
                        hidden = cand_dep.get('hidden', False)
                        processed_ecs = process_easyconfig(path, validate=not retain_all_deps, hidden=hidden)

                        # ensure that selected easyconfig provides required dependency;
                        # raise (rather than only logging an error) so processing does not continue
                        # with an easyconfig that doesn't actually resolve the dependency
                        mods = [spec['ec'].full_mod_name for spec in processed_ecs]
                        dep_mod_name = active_mns.det_full_module_name(cand_dep)
                        if dep_mod_name not in mods:
                            raise EasyBuildError("easyconfig file %s does not contain module %s (mods: %s)",
                                                 path, dep_mod_name, mods)

                        for ec in processed_ecs:
                            # avoid building a throwaway concatenated list for every membership test
                            if ec not in unprocessed and ec not in additional:
                                additional.append(ec)
                                _log.debug("Added %s as dependency of %s" % (ec, entry))
                else:
                    mod_name = easybuild_mns.det_full_module_name(entry['ec'])
                    _log.debug("No more candidate dependencies to resolve for %s" % mod_name)

            # add additional (new) easyconfigs to list of stuff to process
            unprocessed.extend(additional)

        elif not robot:
            # no use in continuing if robot is not enabled, dependencies won't be resolved anyway
            irresolvable = [dep for x in unprocessed for dep in x['dependencies']]
            break

    if irresolvable:
        _log.warning("Irresolvable dependencies (details): %s" % irresolvable)
        irresolvable_mods_eb = [easybuild_mns.det_full_module_name(dep) for dep in irresolvable]
        _log.warning("Irresolvable dependencies (EasyBuild module names): %s" % ', '.join(irresolvable_mods_eb))
        irresolvable_mods = [active_mns.det_full_module_name(dep) for dep in irresolvable]
        # raise instead of only logging: unresolved dependencies are fatal,
        # consistent with the other resolve_dependencies variants in this file
        raise EasyBuildError('Irresolvable dependencies encountered: %s', ', '.join(irresolvable_mods))

    _log.info("Dependency resolution complete, building as follows:\n%s" % ordered_ecs)
    return ordered_ecs
def list_software(output_format=FORMAT_TXT, detailed=False, only_installed=False):
    """
    Show list of supported software

    :param output_format: output format to use
    :param detailed: whether or not to return detailed information (incl. version, versionsuffix, toolchain info)
    :param only_installed: only retain software for which a corresponding module is available
    :return: multi-line string presenting requested info
    """
    silent = build_option('silent')

    paths = find_matching_easyconfigs('*', '*', build_option('robot_path') or [])
    num_paths = len(paths)

    parsed = []
    for i, path in enumerate(paths):
        # a full EasyConfig instance is only required when the module name is needed;
        # that is significantly slower (5-10x) than a 'shallow' parse via EasyConfigParser
        if only_installed:
            cfg = process_easyconfig(path, validate=False, parse_only=True)[0]['ec']
        else:
            cfg = EasyConfigParser(filename=path).get_config_dict()
        parsed.append(cfg)
        print_msg('\r', prefix=False, newline=False, silent=silent)
        print_msg("Processed %d/%d easyconfigs..." % (i + 1, num_paths), newline=False, silent=silent)
    print_msg('', prefix=False, silent=silent)

    software = {}
    for cfg in parsed:
        entries = software.setdefault(cfg['name'], [])

        tc = cfg['toolchain']
        if tc['name'] == DUMMY_TOOLCHAIN_NAME:
            toolchain = DUMMY_TOOLCHAIN_NAME
        else:
            toolchain = '%s/%s' % (tc['name'], tc['version'])

        template_keys = ['description', 'homepage', 'version', 'versionsuffix']
        info = {'toolchain': toolchain}
        info.update((key, cfg.get(key, '')) for key in template_keys)

        # make sure values like homepage & versionsuffix get properly templated
        # (only possible when cfg is a plain dict)
        if isinstance(cfg, dict):
            template_values = template_constant_dict(cfg, skip_lower=False)
            for key in template_keys:
                if '%(' in info[key]:
                    try:
                        info[key] = info[key] % template_values
                    except (KeyError, TypeError, ValueError) as err:
                        _log.debug("Ignoring failure to resolve templates: %s", err)

        if only_installed:
            info['mod_name'] = cfg.full_mod_name
        entries.append(info)

    print_msg("Found %d different software packages" % len(software), silent=silent)

    if only_installed:
        avail_mod_names = modules_tool().available()

        # rebuild software, only retain entries with a corresponding available module
        all_software, software = software, {}
        for name, entries in all_software.items():
            for info in entries:
                if info['mod_name'] in avail_mod_names:
                    software.setdefault(name, []).append(info)

        print_msg("Retained %d installed software packages" % len(software), silent=silent)

    return generate_doc('list_software_%s' % output_format, [software, detailed])