def names(self): """Stable de-duplication of header names in the list without extensions >>> h = HeaderList(['/dir1/a.h', '/dir2/b.h', '/dir3/a.h']) >>> h.names ['a', 'b'] Returns: list of strings: A list of files without extensions """ names = [] for x in self.basenames: name = x # Valid extensions include: ['.cuh', '.hpp', '.hh', '.h'] for ext in ['.cuh', '.hpp', '.hh', '.h']: i = name.rfind(ext) if i != -1: names.append(name[:i]) break else: # No valid extension, should we still include it? names.append(name) return list(dedupe(names))
def names(self): """Stable de-duplication of library names in the list >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so']) >>> l.names ['a', 'b'] Returns: list of strings: A list of library names """ names = [] for x in self.basenames: name = x if x.startswith('lib'): name = x[3:] # Valid extensions include: ['.dylib', '.so', '.a'] for ext in ['.dylib', '.so', '.a']: i = name.rfind(ext) if i != -1: names.append(name[:i]) break else: # No valid extension, should we still include it? names.append(name) return list(dedupe(names))
def names(self): """Stable de-duplication of library names in the list >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so']) >>> assert l.names == ['a', 'b'] """ return list(dedupe(x.split('.')[0][3:] for x in self.basenames))
def hierarchy_tokens(self):
    """Returns the list of tokens that are part of the modulefile
    hierarchy. 'compiler' is always present.
    """
    tokens = configuration(self.name).get('hierarchy', [])

    # Check if all the tokens in the hierarchy are virtual specs.
    # If not warn the user and raise an error.
    not_virtual = [
        t for t in tokens
        if t != 'compiler' and not spack.repo.path.is_virtual(t)
    ]
    if not_virtual:
        msg = "Non-virtual specs in 'hierarchy' list for lmod: {0}\n"
        msg += "Please check the 'modules.yaml' configuration files"
        # str.format returns a new string; assign it back, otherwise the
        # raised error still contains the raw '{0}' placeholder
        msg = msg.format(', '.join(not_virtual))
        raise NonVirtualInHierarchyError(msg)

    # Append 'compiler' which is always implied
    tokens.append('compiler')

    # Deduplicate tokens in case duplicates have been coded
    tokens = list(lang.dedupe(tokens))

    return tokens
def names(self): """Stable de-duplication of file names in the list >>> h = HeaderList(['/dir1/a.h', '/dir2/b.h', '/dir3/a.h']) >>> assert h.names == ['a', 'b'] """ return list(dedupe(x.split('.')[0] for x in self.basenames))
def basenames(self):
    """Stable de-duplication of the base-names in the list

    >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a'])
    >>> assert l.basenames == ['liba.a', 'libb.a']
    """
    return list(dedupe(os.path.basename(x) for x in self.libraries))
def directories(self):
    """Stable de-duplication of the directories where the libraries reside

    >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a'])
    >>> assert l.directories == ['/dir1', '/dir2']
    """
    return list(dedupe(
        os.path.dirname(x) for x in self.libraries if os.path.dirname(x)
    ))
def get_rpaths(pkg):
    """Get a list of all the rpaths for a package."""
    rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
    deps = get_rpath_deps(pkg)
    rpaths.extend(d.prefix.lib for d in deps
                  if os.path.isdir(d.prefix.lib))
    rpaths.extend(d.prefix.lib64 for d in deps
                  if os.path.isdir(d.prefix.lib64))
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
    return list(dedupe(filter_system_paths(rpaths)))
def suffixes(self):
    """List of suffixes that should be appended to the module file name."""
    suffixes = []
    for constraint, suffix in self.conf.get('suffixes', {}).items():
        if constraint in self.spec:
            suffixes.append(suffix)
    suffixes = list(dedupe(suffixes))
    if self.hash:
        suffixes.append(self.hash)
    return suffixes
def basenames(self):
    """Stable de-duplication of the base-names in the list

    >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a'])
    >>> l.basenames
    ['liba.a', 'libb.a']
    >>> h = HeaderList(['/dir1/a.h', '/dir2/b.h', '/dir3/a.h'])
    >>> h.basenames
    ['a.h', 'b.h']

    Returns:
        list of strings: A list of base-names
    """
    return list(dedupe(os.path.basename(x) for x in self.files))
def directories(self):
    """Stable de-duplication of the directories where the files reside.

    >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a'])
    >>> l.directories
    ['/dir1', '/dir2']
    >>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
    >>> h.directories
    ['/dir1', '/dir2']

    Returns:
        list of strings: A list of directories
    """
    return list(dedupe(
        os.path.dirname(x) for x in self.files if os.path.dirname(x)
    ))
def hierarchy_tokens(self):
    """Returns the list of tokens that are part of the modulefile
    hierarchy. 'compiler' is always present.
    """
    tokens = configuration.get('hierarchy', [])

    # Check if all the tokens in the hierarchy are virtual specs.
    # If not warn the user and raise an error.
    not_virtual = [t for t in tokens if not spack.spec.Spec.is_virtual(t)]
    if not_virtual:
        msg = "Non-virtual specs in 'hierarchy' list for lmod: {0}\n"
        msg += "Please check the 'modules.yaml' configuration files"
        # str.format returns a new string; assign it back, otherwise the
        # raised error still contains the raw '{0}' placeholder
        msg = msg.format(', '.join(not_virtual))
        raise NonVirtualInHierarchyError(msg)

    # Append 'compiler' which is always implied
    tokens.append('compiler')

    # Deduplicate tokens in case duplicates have been coded
    tokens = list(lang.dedupe(tokens))

    return tokens
def __add__(self, other):
    return self.__class__(dedupe(self.files + list(other)))
def __init__(self, files):
    if isinstance(files, six.string_types):
        files = [files]
    self.files = list(dedupe(files))
def directories(self):
    """Directories to be searched for header files."""
    values = self._directories
    if values is None:
        values = self._default_directories()
    return list(dedupe(values))
def prune_duplicate_paths(paths):
    """Returns the paths with duplicates removed, order preserved."""
    return list(dedupe(paths))
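# A hypothetical usage sketch of prune_duplicate_paths above (the paths are
# made up for illustration): the first occurrence wins and order is kept.
paths = ['/usr/bin', '/opt/tool/bin', '/usr/bin']
assert prune_duplicate_paths(paths) == ['/usr/bin', '/opt/tool/bin']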
def set_build_environment_variables(pkg, env, dirty):
    """Ensure a clean install environment when we build packages.

    This involves unsetting pesky environment variables that may affect the
    build. It also involves setting environment variables used by Spack's
    compiler wrappers.

    Args:
        pkg: The package we are building
        env: The build environment
        dirty (bool): Skip unsetting the user's environment settings
    """
    # Gather information about various types of dependencies
    build_deps = set(pkg.spec.dependencies(deptype=('build', 'test')))
    link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
    build_link_deps = build_deps | link_deps
    rpath_deps = get_rpath_deps(pkg)
    # This includes all build dependencies and any other dependencies that
    # should be added to PATH (e.g. supporting executables run by build
    # dependencies)
    build_and_supporting_deps = set()
    for build_dep in build_deps:
        build_and_supporting_deps.update(build_dep.traverse(deptype='run'))

    # Establish an arbitrary but fixed ordering of specs so that resulting
    # environment variable values are stable
    def _order(specs):
        return sorted(specs, key=lambda x: x.name)

    # External packages may be installed in a prefix which contains many other
    # package installs. To avoid having those installations override
    # Spack-installed packages, they are placed at the end of search paths.
    # System prefixes are removed entirely later on since they are already
    # searched.
    build_deps = _place_externals_last(_order(build_deps))
    link_deps = _place_externals_last(_order(link_deps))
    build_link_deps = _place_externals_last(_order(build_link_deps))
    rpath_deps = _place_externals_last(_order(rpath_deps))
    build_and_supporting_deps = _place_externals_last(
        _order(build_and_supporting_deps))

    link_dirs = []
    include_dirs = []
    rpath_dirs = []

    # The top-level package is always RPATHed. It hasn't been installed yet
    # so the RPATHs are added unconditionally (e.g. even though lib64/ may
    # not be created for the install).
    for libdir in ['lib', 'lib64']:
        lib_path = os.path.join(pkg.prefix, libdir)
        rpath_dirs.append(lib_path)

    # Set up link, include, RPATH directories that are passed to the
    # compiler wrapper
    for dep in link_deps:
        if is_system_path(dep.prefix):
            continue
        query = pkg.spec[dep.name]
        dep_link_dirs = list()
        try:
            dep_link_dirs.extend(query.libs.directories)
        except NoLibrariesError:
            tty.debug("No libraries found for {0}".format(dep.name))

        for default_lib_dir in ['lib', 'lib64']:
            default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
            if os.path.isdir(default_lib_prefix):
                dep_link_dirs.append(default_lib_prefix)

        link_dirs.extend(dep_link_dirs)
        if dep in rpath_deps:
            rpath_dirs.extend(dep_link_dirs)

        try:
            include_dirs.extend(query.headers.directories)
        except NoHeadersError:
            tty.debug("No headers found for {0}".format(dep.name))

    link_dirs = list(dedupe(filter_system_paths(link_dirs)))
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))

    env.set(SPACK_LINK_DIRS, ':'.join(link_dirs))
    env.set(SPACK_INCLUDE_DIRS, ':'.join(include_dirs))
    env.set(SPACK_RPATH_DIRS, ':'.join(rpath_dirs))

    build_and_supporting_prefixes = filter_system_paths(
        x.prefix for x in build_and_supporting_deps)
    build_link_prefixes = filter_system_paths(
        x.prefix for x in build_link_deps)

    # Add dependencies to CMAKE_PREFIX_PATH
    env.set_path('CMAKE_PREFIX_PATH', build_link_prefixes)

    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler
    env.extend(spack.schema.environment.parse(compiler.environment))

    if compiler.extra_rpaths:
        extra_rpaths = ':'.join(compiler.extra_rpaths)
        env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)

    # Add bin directories from dependencies to the PATH for the build.
    # These directories are added to the beginning of the search path, and in
    # the order given by 'build_and_supporting_prefixes' (the iteration order
    # is reversed because each entry is prepended)
    for prefix in reversed(build_and_supporting_prefixes):
        for dirname in ['bin', 'bin64']:
            bin_dir = os.path.join(prefix, dirname)
            if os.path.isdir(bin_dir):
                env.prepend_path('PATH', bin_dir)

    # Add spack build environment path with compiler wrappers first in
    # the path. We add the compiler wrapper path, which includes default
    # wrappers (cc, c++, f77, f90), AND a subdirectory containing
    # compiler-specific symlinks. The latter ensures that builds that
    # are sensitive to the *name* of the compiler see the right name when
    # we're building with the wrappers.
    #
    # Conflicts on case-insensitive systems (like "CC" and "cc") are
    # handled by putting one in the <build_env_path>/case-insensitive
    # directory. Add that to the path too.
    env_paths = []
    compiler_specific = os.path.join(
        spack.paths.build_env_path,
        os.path.dirname(pkg.compiler.link_paths['cc']))
    for item in [spack.paths.build_env_path, compiler_specific]:
        env_paths.append(item)
        ci = os.path.join(item, 'case-insensitive')
        if os.path.isdir(ci):
            env_paths.append(ci)

    for item in env_paths:
        env.prepend_path('PATH', item)
    env.set_path(SPACK_ENV_PATH, env_paths)

    # Working directory for the spack command itself, for debug logs.
    if spack.config.get('config:debug'):
        env.set(SPACK_DEBUG, 'TRUE')
    env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
    env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format('{name}-{hash:7}'))
    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

    # Find ccache binary and hand it to build environment
    if spack.config.get('config:ccache'):
        ccache = Executable('ccache')
        if not ccache:
            raise RuntimeError("No ccache binary found in PATH")
        env.set(SPACK_CCACHE_BINARY, ccache)

    # Add any pkgconfig directories to PKG_CONFIG_PATH
    for prefix in reversed(build_link_prefixes):
        for directory in ('lib', 'lib64', 'share'):
            pcdir = os.path.join(prefix, directory, 'pkgconfig')
            if os.path.isdir(pcdir):
                env.prepend_path('PKG_CONFIG_PATH', pcdir)

    return env
def from_sourcing_file(filename, *args, **kwargs):
    """Returns modifications that would be made by sourcing a file.

    Parameters:
        filename (str): The file to source
        *args (list of str): Arguments to pass on the command line

    Keyword Arguments:
        shell (str): The shell to use (default: ``bash``)
        shell_options (str): Options passed to the shell (default: ``-c``)
        source_command (str): The command to run (default: ``source``)
        suppress_output (str): Redirect used to suppress output of
            command (default: ``&> /dev/null``)
        concatenate_on_success (str): Operator used to execute a command
            only when the previous command succeeds (default: ``&&``)
        blacklist ([str or re]): Ignore any modifications of these
            variables (default: [])
        whitelist ([str or re]): Always respect modifications of these
            variables (default: []). Has precedence over blacklist.
        clean (bool): In addition to removing empty entries, also remove
            duplicate entries (default: False).

    Returns:
        EnvironmentModifications: an object that, if executed, has the same
        effect on the environment as sourcing the file
    """
    # Check if the file actually exists
    if not os.path.isfile(filename):
        msg = 'Trying to source non-existing file: {0}'.format(filename)
        raise RuntimeError(msg)

    # Kwargs parsing and default values
    shell = kwargs.get('shell', '/bin/bash')
    shell_options = kwargs.get('shell_options', '-c')
    source_command = kwargs.get('source_command', 'source')
    suppress_output = kwargs.get('suppress_output', '&> /dev/null')
    concatenate_on_success = kwargs.get('concatenate_on_success', '&&')
    blacklist = kwargs.get('blacklist', [])
    whitelist = kwargs.get('whitelist', [])
    clean = kwargs.get('clean', False)

    source_file = [source_command, filename]
    source_file.extend(args)
    source_file = ' '.join(source_file)

    dump_cmd = 'import os, json; print(json.dumps(dict(os.environ)))'
    dump_environment = 'python -c "{0}"'.format(dump_cmd)

    # Construct the command that will be executed
    command = [
        shell,
        shell_options,
        ' '.join([
            source_file, suppress_output,
            concatenate_on_success, dump_environment,
        ]),
    ]

    # Try to source the file
    proc = subprocess.Popen(
        command, stdout=subprocess.PIPE, env=os.environ)
    proc.wait()

    if proc.returncode != 0:
        msg = 'Sourcing file {0} returned a non-zero exit code'.format(
            filename)
        raise RuntimeError(msg)

    output = ''.join([line.decode('utf-8') for line in proc.stdout])

    # Construct dictionaries of the environment before and after
    # sourcing the file, so that we can diff them.
    env_before = dict(os.environ)
    env_after = json.loads(output)

    # If we're in python2, convert to str objects instead of unicode
    # like json gives us. We can't put unicode in os.environ anyway.
    if sys.version_info[0] < 3:
        env_after = dict((k.encode('utf-8'), v.encode('utf-8'))
                         for k, v in env_after.items())

    # Other variables unrelated to sourcing a file
    blacklist.extend(['SHLVL', '_', 'PWD', 'OLDPWD', 'PS2'])

    def set_intersection(fullset, *args):
        # A set intersection using string literals and regexs
        meta = '[' + re.escape('[$()*?[]^{|}') + ']'
        subset = fullset & set(args)  # As literal
        for name in args:
            if re.search(meta, name):
                pattern = re.compile(name)
                for k in fullset:
                    if re.match(pattern, k):
                        subset.add(k)
        return subset

    for d in env_after, env_before:
        # Retain (whitelist) has priority over prune (blacklist)
        prune = set_intersection(set(d), *blacklist)
        prune -= set_intersection(prune, *whitelist)
        for k in prune:
            d.pop(k, None)

    # Fill the EnvironmentModifications instance
    env = EnvironmentModifications()

    # New variables
    new_variables = list(set(env_after) - set(env_before))
    # Variables that have been unset
    unset_variables = list(set(env_before) - set(env_after))
    # Variables that have been modified
    common_variables = set(env_before).intersection(set(env_after))
    modified_variables = [x for x in common_variables
                          if env_before[x] != env_after[x]]

    # Consistent output order - looks nicer, easier comparison...
    new_variables.sort()
    unset_variables.sort()
    modified_variables.sort()

    def return_separator_if_any(*args):
        separators = ':', ';'
        for separator in separators:
            for arg in args:
                if separator in arg:
                    return separator
        return None

    # Add variables to env.
    # Assume that variables with 'PATH' in the name or that contain
    # separators like ':' or ';' are more likely to be paths
    for x in new_variables:
        sep = return_separator_if_any(env_after[x])
        if sep:
            env.prepend_path(x, env_after[x], separator=sep)
        elif 'PATH' in x:
            env.prepend_path(x, env_after[x])
        else:
            # We just need to set the variable to the new value
            env.set(x, env_after[x])

    for x in unset_variables:
        env.unset(x)

    for x in modified_variables:
        before = env_before[x]
        after = env_after[x]
        sep = return_separator_if_any(before, after)
        if sep:
            before_list = before.split(sep)
            after_list = after.split(sep)

            # Filter out empty strings
            before_list = list(filter(None, before_list))
            after_list = list(filter(None, after_list))

            # Remove duplicate entries (worse matching, bloats env)
            if clean:
                before_list = list(dedupe(before_list))
                after_list = list(dedupe(after_list))
                # The reassembled cleaned entries
                before = sep.join(before_list)
                after = sep.join(after_list)

            # Paths that have been removed
            remove_list = [
                ii for ii in before_list if ii not in after_list]
            # Check that nothing has been added in the middle of
            # before_list
            remaining_list = [
                ii for ii in before_list if ii in after_list]
            try:
                start = after_list.index(remaining_list[0])
                end = after_list.index(remaining_list[-1])
                search = sep.join(after_list[start:end + 1])
            except IndexError:
                env.prepend_path(x, after)
                # Nothing of the original value remains; 'search' is
                # unbound here, so skip the positional analysis below
                continue

            if search not in before:
                # We just need to set the variable to the new value
                env.prepend_path(x, after)
            else:
                try:
                    prepend_list = after_list[:start]
                    prepend_list.reverse()  # Preserve order after prepend
                except KeyError:
                    prepend_list = []
                try:
                    append_list = after_list[end + 1:]
                except KeyError:
                    append_list = []

                for item in remove_list:
                    env.remove_path(x, item)
                for item in append_list:
                    env.append_path(x, item)
                for item in prepend_list:
                    env.prepend_path(x, item)
        else:
            # We just need to set the variable to the new value
            env.set(x, after)

    return env
def set_build_environment_variables(pkg, env, dirty):
    """Ensure a clean install environment when we build packages.

    This involves unsetting pesky environment variables that may affect the
    build. It also involves setting environment variables used by Spack's
    compiler wrappers.

    Args:
        pkg: The package we are building
        env: The build environment
        dirty (bool): Skip unsetting the user's environment settings
    """
    # Gather information about various types of dependencies
    build_deps = set(pkg.spec.dependencies(deptype=('build', 'test')))
    link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
    build_link_deps = build_deps | link_deps
    rpath_deps = get_rpath_deps(pkg)

    link_dirs = []
    include_dirs = []
    rpath_dirs = []

    # The top-level package is always RPATHed. It hasn't been installed yet
    # so the RPATHs are added unconditionally (e.g. even though lib64/ may
    # not be created for the install).
    for libdir in ['lib', 'lib64']:
        lib_path = os.path.join(pkg.prefix, libdir)
        rpath_dirs.append(lib_path)

    # Set up link, include, RPATH directories that are passed to the
    # compiler wrapper
    for dep in link_deps:
        if is_system_path(dep.prefix):
            continue
        query = pkg.spec[dep.name]
        dep_link_dirs = list()
        try:
            dep_link_dirs.extend(query.libs.directories)
        except NoLibrariesError:
            tty.debug("No libraries found for {0}".format(dep.name))

        for default_lib_dir in ['lib', 'lib64']:
            default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
            if os.path.isdir(default_lib_prefix):
                dep_link_dirs.append(default_lib_prefix)

        link_dirs.extend(dep_link_dirs)
        if dep in rpath_deps:
            rpath_dirs.extend(dep_link_dirs)

        try:
            include_dirs.extend(query.headers.directories)
        except NoHeadersError:
            tty.debug("No headers found for {0}".format(dep.name))

    link_dirs = list(dedupe(filter_system_paths(link_dirs)))
    include_dirs = list(dedupe(filter_system_paths(include_dirs)))
    rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))

    env.set(SPACK_LINK_DIRS, ':'.join(link_dirs))
    env.set(SPACK_INCLUDE_DIRS, ':'.join(include_dirs))
    env.set(SPACK_RPATH_DIRS, ':'.join(rpath_dirs))

    build_prefixes = [dep.prefix for dep in build_deps]
    build_link_prefixes = [dep.prefix for dep in build_link_deps]

    # add run-time dependencies of direct build-time dependencies:
    for build_dep in build_deps:
        for run_dep in build_dep.traverse(deptype='run'):
            build_prefixes.append(run_dep.prefix)

    # Filter out system paths: ['/', '/usr', '/usr/local']
    # These paths can be introduced into the build when an external package
    # is added as a dependency. The problem with these paths is that they
    # often contain hundreds of other packages installed in the same
    # directory. If these paths come first, they can overshadow Spack
    # installations.
    build_prefixes = filter_system_paths(build_prefixes)
    build_link_prefixes = filter_system_paths(build_link_prefixes)

    # Add dependencies to CMAKE_PREFIX_PATH
    env.set_path('CMAKE_PREFIX_PATH', build_link_prefixes)

    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler
    env.extend(spack.schema.environment.parse(compiler.environment))

    if compiler.extra_rpaths:
        extra_rpaths = ':'.join(compiler.extra_rpaths)
        env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)

    # Add bin directories from dependencies to the PATH for the build.
    for prefix in build_prefixes:
        for dirname in ['bin', 'bin64']:
            bin_dir = os.path.join(prefix, dirname)
            if os.path.isdir(bin_dir):
                env.prepend_path('PATH', bin_dir)

    # Add spack build environment path with compiler wrappers first in
    # the path. We add the compiler wrapper path, which includes default
    # wrappers (cc, c++, f77, f90), AND a subdirectory containing
    # compiler-specific symlinks. The latter ensures that builds that
    # are sensitive to the *name* of the compiler see the right name when
    # we're building with the wrappers.
    #
    # Conflicts on case-insensitive systems (like "CC" and "cc") are
    # handled by putting one in the <build_env_path>/case-insensitive
    # directory. Add that to the path too.
    env_paths = []
    compiler_specific = os.path.join(
        spack.paths.build_env_path,
        os.path.dirname(pkg.compiler.link_paths['cc']))
    for item in [spack.paths.build_env_path, compiler_specific]:
        env_paths.append(item)
        ci = os.path.join(item, 'case-insensitive')
        if os.path.isdir(ci):
            env_paths.append(ci)

    for item in env_paths:
        env.prepend_path('PATH', item)
    env.set_path(SPACK_ENV_PATH, env_paths)

    # Working directory for the spack command itself, for debug logs.
    if spack.config.get('config:debug'):
        env.set(SPACK_DEBUG, 'TRUE')
    env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
    env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format('{name}-{hash:7}'))
    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

    # Find ccache binary and hand it to build environment
    if spack.config.get('config:ccache'):
        ccache = Executable('ccache')
        if not ccache:
            raise RuntimeError("No ccache binary found in PATH")
        env.set(SPACK_CCACHE_BINARY, ccache)

    # Add any pkgconfig directories to PKG_CONFIG_PATH
    for prefix in build_link_prefixes:
        for directory in ('lib', 'lib64', 'share'):
            pcdir = os.path.join(prefix, directory, 'pkgconfig')
            if os.path.isdir(pcdir):
                env.prepend_path('PKG_CONFIG_PATH', pcdir)

    return env
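# filter_system_paths is used throughout these snippets but not shown. A
# minimal sketch based on the comment above ("Filter out system paths:
# ['/', '/usr', '/usr/local']"); the real Spack implementation covers more
# prefixes and normalizes paths more carefully, so this is an assumption
# for illustration only.
import os


def filter_system_paths(paths):
    system_prefixes = ('/', '/usr', '/usr/local')
    return [p for p in paths if os.path.normpath(p) not in system_prefixes]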
def test_dedupe():
    assert [x for x in dedupe([1, 2, 1, 3, 2])] == [1, 2, 3]
    assert [x for x in dedupe([1, -2, 1, 3, 2], key=abs)] == [1, -2, 3]
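# None of the snippets define dedupe itself. A minimal generator sketch that
# satisfies test_dedupe above (stable first-occurrence order, optional key
# function); the actual implementation lives in llnl.util.lang.
def dedupe(sequence, key=None):
    seen = set()
    for item in sequence:
        # Compare by key(item) when a key is given, by the item otherwise
        item_key = item if key is None else key(item)
        if item_key not in seen:
            seen.add(item_key)
            yield item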
def __add__(self, other):
    return LibraryList(dedupe(self.libraries + list(other)))
def unlocked_paths(self):
    """Returns a dictionary mapping conditions to a list of unlocked paths.

    The paths that are unconditionally unlocked are under the key 'None'.
    The other keys represent the list of services you need loaded to
    unlock the corresponding paths.
    """
    unlocked = collections.defaultdict(list)

    # Get the list of services we require and we provide
    requires_key = list(self.conf.requires)
    provides_key = list(self.conf.provides)

    # A compiler is always required. To avoid duplication pop the
    # 'compiler' item from required if we also **provide** one
    if 'compiler' in provides_key:
        requires_key.remove('compiler')

    # Compute the unique combinations of the services we provide
    combinations = []
    for ii in range(len(provides_key)):
        combinations += itertools.combinations(provides_key, ii + 1)

    # Attach the services required to each combination
    to_be_processed = [x + tuple(requires_key) for x in combinations]

    # Compute the paths that are unconditionally added
    # and append them to the dictionary (key = None)
    available_combination = []
    for item in to_be_processed:
        hierarchy = self.conf.hierarchy_tokens
        available = self.conf.available
        ac = [x for x in hierarchy if x in item]
        available_combination.append(tuple(ac))
        parts = [self.token_to_path(x, available[x]) for x in ac]
        unlocked[None].append(tuple([self.arch_dirname] + parts))

    # Deduplicate the list
    unlocked[None] = list(lang.dedupe(unlocked[None]))

    # Compute the combination of missing requirements: this will lead to
    # paths that are unlocked conditionally
    missing = self.conf.missing

    missing_combinations = []
    for ii in range(len(missing)):
        missing_combinations += itertools.combinations(missing, ii + 1)

    # Attach the services required to each combination
    for m in missing_combinations:
        to_be_processed = [m + x for x in available_combination]
        for item in to_be_processed:
            hierarchy = self.conf.hierarchy_tokens
            available = self.conf.available
            token2path = lambda x: self.token_to_path(x, available[x])
            parts = []
            for x in hierarchy:
                if x not in item:
                    continue
                value = token2path(x) if x in available else x
                parts.append(value)
            unlocked[m].append(tuple([self.arch_dirname] + parts))
        # Deduplicate the list
        unlocked[m] = list(lang.dedupe(unlocked[m]))

    return unlocked
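# The combination step in unlocked_paths enumerates every non-empty subset
# of the provided services. A self-contained illustration with made-up
# tokens, mirroring the loop above:
import itertools

provides_key = ['mpi', 'lapack']
combinations = []
for ii in range(len(provides_key)):
    combinations += itertools.combinations(provides_key, ii + 1)
assert combinations == [('mpi',), ('lapack',), ('mpi', 'lapack')]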
def from_environment_diff(before, after, clean=False):
    """Constructs an instance of a
    :py:class:`spack.util.environment.EnvironmentModifications` object
    from the diff of two dictionaries.

    Args:
        before (dict): environment before the modifications are applied
        after (dict): environment after the modifications are applied
        clean (bool): in addition to removing empty entries, also remove
            duplicate entries
    """
    # Fill the EnvironmentModifications instance
    env = EnvironmentModifications()

    # New variables
    new_variables = list(set(after) - set(before))
    # Variables that have been unset
    unset_variables = list(set(before) - set(after))
    # Variables that have been modified
    common_variables = set(before).intersection(set(after))
    modified_variables = [
        x for x in common_variables if before[x] != after[x]
    ]

    # Consistent output order - looks nicer, easier comparison...
    new_variables.sort()
    unset_variables.sort()
    modified_variables.sort()

    def return_separator_if_any(*args):
        separators = ':', ';'
        for separator in separators:
            for arg in args:
                if separator in arg:
                    return separator
        return None

    # Add variables to env.
    # Assume that variables with 'PATH' in the name or that contain
    # separators like ':' or ';' are more likely to be paths
    for x in new_variables:
        sep = return_separator_if_any(after[x])
        if sep:
            env.prepend_path(x, after[x], separator=sep)
        elif 'PATH' in x:
            env.prepend_path(x, after[x])
        else:
            # We just need to set the variable to the new value
            env.set(x, after[x])

    for x in unset_variables:
        env.unset(x)

    for x in modified_variables:
        value_before = before[x]
        value_after = after[x]
        sep = return_separator_if_any(value_before, value_after)
        if sep:
            before_list = value_before.split(sep)
            after_list = value_after.split(sep)

            # Filter out empty strings
            before_list = list(filter(None, before_list))
            after_list = list(filter(None, after_list))

            # Remove duplicate entries (worse matching, bloats env)
            if clean:
                before_list = list(dedupe(before_list))
                after_list = list(dedupe(after_list))
                # The reassembled cleaned entries
                value_before = sep.join(before_list)
                value_after = sep.join(after_list)

            # Paths that have been removed
            remove_list = [
                ii for ii in before_list if ii not in after_list
            ]
            # Check that nothing has been added in the middle of
            # before_list
            remaining_list = [ii for ii in before_list if ii in after_list]
            try:
                start = after_list.index(remaining_list[0])
                end = after_list.index(remaining_list[-1])
                search = sep.join(after_list[start:end + 1])
            except IndexError:
                env.prepend_path(x, value_after)
                continue

            if search not in value_before:
                # We just need to set the variable to the new value
                env.prepend_path(x, value_after)
            else:
                try:
                    prepend_list = after_list[:start]
                    prepend_list.reverse()  # Preserve order after prepend
                except KeyError:
                    prepend_list = []
                try:
                    append_list = after_list[end + 1:]
                except KeyError:
                    append_list = []

                for item in remove_list:
                    env.remove_path(x, item)
                for item in append_list:
                    env.append_path(x, item)
                for item in prepend_list:
                    env.prepend_path(x, item)
        else:
            # We just need to set the variable to the new value
            env.set(x, value_after)

    return env
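# A hypothetical usage sketch of from_environment_diff with toy dictionaries
# (variable names and paths are made up). Tracing the logic above: PATH keeps
# '/usr/bin', so only the new leading entry is prepended; NEW has no
# separator and no 'PATH' in its name, so it is plainly set; GONE is unset.
before = {'PATH': '/usr/bin', 'GONE': 'x'}
after = {'PATH': '/opt/tool/bin:/usr/bin', 'NEW': 'y'}
env = from_environment_diff(before, after)
# Recorded modifications: prepend_path('PATH', '/opt/tool/bin'),
# set('NEW', 'y'), unset('GONE')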