def init_compiler_config():
    """Compiler search used when Spack has no compilers.

    Populates the enclosing ``config`` mapping for the current ``arch``
    with every compiler discovered on $PATH, then persists the result.
    """
    # Fresh entry for this architecture; discovered compilers merge into it.
    config[arch] = {}
    for found in find_compilers(*get_path('PATH')):
        config[arch].update(_to_dict(found))
    # Persist the assembled configuration at the requested scope.
    spack.config.update_config('compilers', config, scope=scope)
def clean_environment():
    """Sanitize the build environment.

    Eliminates anything the user has set that may interfere with builds.
    Applied immediately (unlike the other functions) so it doesn't
    overwrite what the modules load.
    """
    env = EnvironmentModifications()

    # These variables can affect how some packages find libraries; drop
    # them so builds never pull in unintended external dependencies.
    for var in ('LD_LIBRARY_PATH', 'LIBRARY_PATH', 'CPATH',
                'LD_RUN_PATH', 'DYLD_LIBRARY_PATH'):
        env.unset(var)

    build_lang = spack.config.get('config:build_language')
    if build_lang:
        # Force language-related variables (e.g. English compiler
        # messages) so parse_log_events can show useful matches.
        env.set('LC_ALL', build_lang)

    # The macports ld conflicts with the built-in linker on El Capitan
    # (assembler issues such as "suffix or operands invalid for `movq'"),
    # so remove any macports entries from PATH.
    for entry in get_path('PATH'):
        if '/macports/' in entry:
            env.remove_path('PATH', entry)

    env.apply_modifications()
def compiler_find(args):
    """Search either $PATH or a list of paths OR MODULES for compilers
    and add them to Spack's configuration.
    """
    # Fall back to the PATH environment variable when no explicit
    # search paths were given on the command line.
    paths = args.add_paths
    if not paths:
        paths = get_path('PATH')

    # Don't initialize compilers config via compilers.get_compiler_config:
    # compiler_find drives the entire process itself and all_compilers
    # returns an empty config (every other caller uses init_config=True).
    found = list(spack.compilers.find_compilers(*paths))

    # Keep only compilers whose spec is not already configured for the
    # same (os, target) at the requested scope.
    new_compilers = []
    for compiler in found:
        arch_spec = ArchSpec(None, compiler.operating_system, compiler.target)
        duplicates = spack.compilers.compilers_for_spec(
            compiler.spec, arch_spec, args.scope)
        if not duplicates:
            new_compilers.append(compiler)

    if not new_compilers:
        tty.msg("Found no new compilers")
        return

    spack.compilers.add_compilers_to_config(new_compilers,
                                            scope=args.scope,
                                            init_config=False)
    count = len(new_compilers)
    plural = 's' if count > 1 else ''
    filename = spack.config.get_config_filename(args.scope, 'compilers')
    tty.msg("Added %d new compiler%s to %s" % (count, plural, filename))
    colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
def compiler_find(args):
    """Search either $PATH or a list of paths OR MODULES for compilers
    and add them to Spack's configuration.
    """
    # Default to the PATH environment variable when the caller supplied
    # no explicit paths.
    paths = args.add_paths or get_path('PATH')

    # Don't initialize compilers config via compilers.get_compiler_config.
    # Just let compiler_find do the entire process and return an empty
    # config from all_compilers.  Default for any other process is
    # init_config=True.
    detected = list(spack.compilers.find_compilers(*paths))

    # A compiler is "new" when nothing matching its spec exists yet for
    # the same (os, target) pair at the requested scope.
    new_compilers = [
        c for c in detected
        if not spack.compilers.compilers_for_spec(
            c.spec, ArchSpec(None, c.operating_system, c.target), args.scope)
    ]

    if new_compilers:
        spack.compilers.add_compilers_to_config(new_compilers,
                                                scope=args.scope,
                                                init_config=False)
        how_many = len(new_compilers)
        suffix = 's' if how_many > 1 else ''
        filename = spack.config.get_config_filename(args.scope, 'compilers')
        tty.msg("Added %d new compiler%s to %s" % (how_many, suffix, filename))
        colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
    else:
        tty.msg("Found no new compilers")
def clean_environment():
    """Sanitize the build environment.

    Removes anything the user has set that may interfere with builds.
    Applied immediately (unlike the other functions) so it doesn't
    overwrite what the modules load.
    """
    mods = EnvironmentModifications()

    # Unset variables that can affect how some packages find libraries,
    # so that builds never pull in unintended external dependencies.
    mods.unset('LD_LIBRARY_PATH')
    mods.unset('LIBRARY_PATH')
    mods.unset('CPATH')
    mods.unset('LD_RUN_PATH')
    mods.unset('DYLD_LIBRARY_PATH')

    # The macports ld can conflict with the built-in linker on El
    # Capitan (e.g. assembler errors like "suffix or operands invalid
    # for `movq'"), so strip macports entries out of PATH.
    for directory in get_path('PATH'):
        if '/macports/' in directory:
            mods.remove_path('PATH', directory)

    mods.apply_modifications()
def compiler_search_paths(self):
    """Calls the default function but unloads Cray's programming
    environments first.

    This prevents detecting Cray compiler wrappers and avoids possible
    false detections.
    """
    # Search $PATH only after the Cray programming environment has been
    # unloaded, so its wrapper executables are not visible.
    with unload_programming_environment():
        return fs.search_paths_for_executables(*get_path('PATH'))
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Finds compilers in the paths supplied.

    Looks for all combinations of ``compiler_names`` with the
    ``prefixes`` and ``suffixes`` defined for this compiler class.  If
    any compilers match the compiler_names, prefixes, or suffixes, uses
    ``detect_version`` to figure out what version the compiler is.

    This returns a dict with compilers grouped by (prefix, suffix,
    version) tuples.  This can be further organized by find().
    """
    if not path:
        path = get_path('PATH')

    prefixes = [''] + cls.prefixes
    suffixes = [''] + cls.suffixes

    checks = []
    for directory in path:
        # Skip anything that is not a readable, searchable directory.
        if not (os.path.isdir(directory) and
                os.access(directory, os.R_OK | os.X_OK)):
            continue

        files = os.listdir(directory)
        for exe in files:
            full_path = join_path(directory, exe)

            prod = itertools.product(prefixes, compiler_names, suffixes)
            for pre, name, suf in prod:
                regex = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)

                match = re.match(regex, exe)
                if match:
                    key = (full_path, ) + match.groups()
                    checks.append(key)

    def check(key):
        """Run ``detect_version`` on one candidate; None on failure."""
        try:
            full_path, prefix, suffix = key
            version = detect_version(full_path)
            return (version, prefix, suffix, full_path)
        # FIX: the old ``except ProcessError, e`` form is Python-2-only
        # and a SyntaxError on Python 3; use ``as e`` (valid on 2.6+ too).
        except ProcessError as e:
            tty.debug(
                "Couldn't get version for compiler %s" % full_path, e)
            return None
        except Exception as e:
            # Catching "Exception" here is fine because it just
            # means something went wrong running a candidate executable.
            tty.debug(
                "Error while executing candidate compiler %s" % full_path,
                "%s: %s" % (e.__class__.__name__, e))
            return None
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Finds compilers in the paths supplied.

    Looks for all combinations of ``compiler_names`` with the
    ``prefixes`` and ``suffixes`` defined for this compiler class.  If
    any compilers match the compiler_names, prefixes, or suffixes, uses
    ``detect_version`` to figure out what version the compiler is.

    This returns a dict with compilers grouped by (prefix, suffix,
    version) tuples.  This can be further organized by find().
    """
    if not path:
        path = get_path('PATH')

    prefixes = [''] + cls.prefixes
    suffixes = [''] + cls.suffixes

    checks = []
    for directory in path:
        # Only readable, searchable directories are worth scanning.
        if not (os.path.isdir(directory) and
                os.access(directory, os.R_OK | os.X_OK)):
            continue

        files = os.listdir(directory)
        for exe in files:
            full_path = join_path(directory, exe)

            prod = itertools.product(prefixes, compiler_names, suffixes)
            for pre, name, suf in prod:
                regex = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)

                match = re.match(regex, exe)
                if match:
                    key = (full_path,) + match.groups()
                    checks.append(key)

    def check(key):
        """Detect the version of one candidate; None when it fails."""
        try:
            full_path, prefix, suffix = key
            version = detect_version(full_path)
            return (version, prefix, suffix, full_path)
        # FIX: ``except ProcessError, e`` is Python-2-only syntax and a
        # SyntaxError on Python 3; the ``as e`` form works on both.
        except ProcessError as e:
            tty.debug(
                "Couldn't get version for compiler %s" % full_path, e)
            return None
        except Exception as e:
            # Catching "Exception" here is fine because it just
            # means something went wrong running a candidate executable.
            tty.debug(
                "Error while executing candidate compiler %s" % full_path,
                "%s: %s" % (e.__class__.__name__, e))
            return None
def find_compilers(path_hints=None):
    """Return the list of compilers found in the paths given as arguments.

    Args:
        path_hints (list or None): list of path hints where to look for.
            A sensible default based on the ``PATH`` environment variable
            will be used if the value is None

    Returns:
        List of compilers found
    """
    if path_hints is None:
        path_hints = get_path('PATH')
    default_paths = fs.search_paths_for_executables(*path_hints)

    # Version detection is dispatched to a pool of workers; build the
    # list of call arguments first, one batch per operating system class.
    arguments = []
    for operating_system in all_os_classes():
        search_paths = getattr(
            operating_system, 'compiler_search_paths', default_paths)
        arguments.extend(
            arguments_to_detect_version_fn(operating_system, search_paths))

    # Map the detection function over the arguments with a thread pool.
    pool = multiprocessing.pool.ThreadPool()
    try:
        detected_versions = pool.map(detect_version, arguments)
    finally:
        pool.close()

    def valid_version(item):
        """True when detection succeeded; logs the error otherwise."""
        _, error = item
        if error is None:
            return True
        try:
            # This will fail on Python 2.6 if a non ascii
            # character is in the error
            tty.debug(error)
        except UnicodeEncodeError:
            pass
        return False

    # Strip the (always-None here) error component and build the list.
    return make_compiler_list(
        value for value, _ in detected_versions
        if valid_version((value, _))
    )
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Finds compilers in the paths supplied.

    Looks for all combinations of ``compiler_names`` with the
    ``prefixes`` and ``suffixes`` defined for this compiler class.  If
    any compilers match the compiler_names, prefixes, or suffixes, uses
    ``detect_version`` to figure out what version the compiler is.

    This returns a dict with compilers grouped by (prefix, suffix,
    version) tuples.  This can be further organized by find().
    """
    if not path:
        path = get_path('PATH')

    prefixes = [''] + cls.prefixes
    suffixes = [''] + cls.suffixes

    checks = []
    for directory in path:
        # Only scan readable, searchable directories.
        if not (os.path.isdir(directory) and
                os.access(directory, os.R_OK | os.X_OK)):
            continue

        for exe in os.listdir(directory):
            full_path = os.path.join(directory, exe)

            combos = itertools.product(prefixes, compiler_names, suffixes)
            for prefix, name, suffix in combos:
                pattern = r'^(%s)%s(%s)$' % (prefix, re.escape(name), suffix)
                matched = re.match(pattern, exe)
                if matched:
                    checks.append(
                        (full_path, ) + matched.groups() + (detect_version, ))

    # Run version detection in parallel, keeping only the candidates
    # that produced a version.
    successful = [result
                  for result in mp.parmap(_get_versioned_tuple, checks)
                  if result is not None]

    # 'successful' is ordered like the input paths.  Reverse it here so
    # that dict creation (last insert wins) does not spoil the intended
    # precedence.
    successful.reverse()
    return dict(((v, p, s), path) for v, p, s, path in successful)
def compiler_search_paths(self):
    """Calls the default function but unloads Cray's programming
    environments first.

    This prevents from detecting Cray compiler wrappers and avoids
    possible false detections.
    """
    import spack.compilers

    # Baseline: whatever is on PATH once the Cray PrgEnv is unloaded.
    with unload_programming_environment():
        search_paths = get_path('PATH')

    # Extracts the bin directory that a "module show" output prepends
    # to PATH.
    extract_path_re = re.compile(r'prepend-path[\s]*PATH[\s]*([/\w\.:-]*)')

    for compiler_cls in spack.compilers.all_compiler_types():
        # Check if the compiler class is supported on Cray
        prg_env = getattr(compiler_cls, 'PrgEnv', None)
        compiler_module = getattr(compiler_cls, 'PrgEnv_compiler', None)
        if not (prg_env and compiler_module):
            continue

        # It is supported, check which versions are available
        output = module('avail', compiler_cls.PrgEnv_compiler)
        version_regex = r'({0})/([\d\.]+[\d]-?[\w]*)'.format(
            compiler_cls.PrgEnv_compiler
        )
        matches = re.findall(version_regex, output)
        # 'classic' variants are deliberately skipped.
        versions = tuple(version for _, version in matches
                         if 'classic' not in version)

        # Now inspect the modules and add to paths
        msg = "[CRAY FE] Detected FE compiler [name={0}, versions={1}]"
        tty.debug(msg.format(compiler_module, versions))
        for v in versions:
            try:
                current_module = compiler_module + '/' + v
                out = module('show', current_module)
                match = extract_path_re.search(out)
                search_paths += match.group(1).split(':')
            except Exception as e:
                # Best effort: a module that cannot be inspected is
                # logged and skipped rather than aborting the search.
                msg = ("[CRAY FE] An unexpected error occurred while "
                       "detecting FE compiler [compiler={0}, "
                       " version={1}, error={2}]")
                tty.debug(msg.format(compiler_cls.name, v, str(e)))

    # Deduplicate while preserving the original ordering.
    search_paths = list(llnl.util.lang.dedupe(search_paths))
    return fs.search_paths_for_executables(*search_paths)
def _get_config():
    """Get a Spack config, but make sure it has compiler configuration
    first."""
    # If any configuration file has compilers, just stick with the
    # ones already configured.
    config = spack.config.get_compilers_config()
    existing = [spack.spec.CompilerSpec(entry) for entry in config]
    if existing:
        return config

    # No compilers configured anywhere: search PATH and record what we
    # find in the user configuration.
    add_compilers_to_config('user', *find_compilers(*get_path('PATH')))

    # After writing compilers to the user config, return a full config
    # from all files.
    return spack.config.get_compilers_config()
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
    """Finds compilers in the paths supplied.

    Looks for all combinations of ``compiler_names`` with the
    ``prefixes`` and ``suffixes`` defined for this compiler class.  If
    any compilers match the compiler_names, prefixes, or suffixes, uses
    ``detect_version`` to figure out what version the compiler is.

    This returns a dict with compilers grouped by (prefix, suffix,
    version) tuples.  This can be further organized by find().
    """
    if not path:
        path = get_path('PATH')

    all_prefixes = [''] + cls.prefixes
    all_suffixes = [''] + cls.suffixes

    candidates = []
    for directory in path:
        # Ignore entries that are not readable, searchable directories.
        readable = (os.path.isdir(directory) and
                    os.access(directory, os.R_OK | os.X_OK))
        if not readable:
            continue

        for exe in os.listdir(directory):
            full_path = os.path.join(directory, exe)
            for pre, name, suf in itertools.product(
                    all_prefixes, compiler_names, all_suffixes):
                expr = r'^(%s)%s(%s)$' % (pre, re.escape(name), suf)
                hit = re.match(expr, exe)
                if hit:
                    candidates.append(
                        (full_path,) + hit.groups() + (detect_version,))

    # Detect versions in parallel and drop the failed candidates.
    successful = [k for k in mp.parmap(_get_versioned_tuple, candidates)
                  if k is not None]

    # 'successful' is ordered like the input paths.  Reverse it so that
    # dict creation (last insert wins) does not spoil the intended
    # precedence.
    successful.reverse()
    return dict(((v, p, s), path) for v, p, s, path in successful)
def compiler_add(args):
    """Search either $PATH or a list of paths for compilers and add them
    to Spack's configuration."""
    # Fall back to the PATH environment variable when no explicit
    # search paths were given on the command line.
    paths = args.add_paths
    if not paths:
        paths = get_path('PATH')

    # BUG FIX: search the resolved ``paths`` (which includes the PATH
    # fallback), not ``args.add_paths`` -- the original made the
    # fallback dead code, so running with no paths searched nothing.
    compilers = [c for c in spack.compilers.find_compilers(*paths)
                 if c.spec not in spack.compilers.all_compilers()]

    if compilers:
        spack.compilers.add_compilers_to_config('user', *compilers)
        n = len(compilers)
        tty.msg("Added %d new compiler%s to %s" % (
            n, 's' if n > 1 else '',
            spack.config.get_config_scope_filename('user', 'compilers')))
        colify(reversed(sorted(c.spec for c in compilers)), indent=4)
    else:
        tty.msg("Found no new compilers")
def _get_config():
    """Get a Spack config, but make sure it has compiler configuration
    first."""
    # Stick with the compilers that are already configured, if any
    # configuration file has them.
    config = spack.config.get_config()
    existing = [spack.spec.CompilerSpec(name)
                for name in config.get_section_names('compiler')]
    if existing:
        return config

    # Nothing configured: search PATH and record anything not already
    # known in the user configuration.
    found = find_compilers(*get_path('PATH'))
    new_compilers = [c for c in found if c.spec not in existing]
    add_compilers_to_config('user', *new_compilers)

    # After writing compilers to the user config, return a full config
    # from all files.
    return spack.config.get_config(refresh=True)
def compiler_add(args):
    """Search either $PATH or a list of paths for compilers and add them
    to Spack's configuration."""
    # Default to the PATH environment variable when the caller supplied
    # no explicit paths.
    paths = args.add_paths
    if not paths:
        paths = get_path('PATH')

    # BUG FIX: use ``paths`` here, not ``args.add_paths`` -- otherwise
    # the PATH fallback computed above is never used and invoking the
    # command without paths searches nothing.
    compilers = [
        c for c in spack.compilers.find_compilers(*paths)
        if c.spec not in spack.compilers.all_compilers()
    ]

    if compilers:
        spack.compilers.add_compilers_to_config('user', *compilers)
        n = len(compilers)
        tty.msg("Added %d new compiler%s to %s" %
                (n, 's' if n > 1 else '',
                 spack.config.get_filename('user')))
        colify(reversed(sorted(c.spec for c in compilers)), indent=4)
    else:
        tty.msg("Found no new compilers")
def _get_config():
    """Get a Spack config, but make sure it has compiler configuration
    first."""
    # If any configuration file has compilers, just return the config
    # as-is and keep the ones already configured.
    config = spack.config.get_config()
    existing = [
        spack.spec.CompilerSpec(section)
        for section in config.get_section_names('compiler')
    ]
    if existing:
        return config

    # Otherwise search PATH for compilers and write the previously
    # unknown ones into the user configuration.
    detected = find_compilers(*get_path('PATH'))
    add_compilers_to_config(
        'user', *[c for c in detected if c.spec not in existing])

    # After writing compilers to the user config, return a full config
    # from all files.
    return spack.config.get_config(refresh=True)
def find_compilers(self, *paths):
    """Return a list of compilers found in the supplied paths.

    This invokes the find() method for each Compiler class, and appends
    the compilers detected to a list.
    """
    if not paths:
        paths = get_path('PATH')

    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for entry in paths:
        # Eliminate symlinks and just take the real directories.
        real_dir = os.path.realpath(entry)
        if not os.path.isdir(real_dir):
            continue
        filtered_path.append(real_dir)

        # Check for a bin directory, add it if it exists
        # (renamed from ``bin`` so the builtin is not shadowed).
        bin_dir = join_path(real_dir, 'bin')
        if os.path.isdir(bin_dir):
            filtered_path.append(os.path.realpath(bin_dir))

    # Once the paths are cleaned up, do a search for each type of
    # compiler.  We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.
    # NOTE: we import spack.compilers here to avoid init order cycles
    import spack.compilers
    types = spack.compilers.all_compiler_types()
    compiler_lists = parmap(
        lambda cmp_cls: self.find_compiler(cmp_cls, *filtered_path),
        types)

    # Flatten the per-class result lists; this also warms the version
    # caches in the parent process, which speeds up Spack a lot.
    return [comp for per_class in compiler_lists for comp in per_class]
def find_compilers(self, *paths):
    """Return a list of compilers found in the supplied paths.

    This invokes the find() method for each Compiler class, and appends
    the compilers detected to a list.
    """
    if not paths:
        paths = get_path('PATH')

    # Make sure path elements exist, and include /bin directories
    # under prefixes.
    filtered_path = []
    for p in paths:
        # Eliminate symlinks and just take the real directories.
        p = os.path.realpath(p)
        if not os.path.isdir(p):
            continue
        filtered_path.append(p)

        # Check for a bin directory, add it if it exists
        bin_dir = join_path(p, 'bin')
        if os.path.isdir(bin_dir):
            filtered_path.append(os.path.realpath(bin_dir))

    # Once the paths are cleaned up, do a search for each type of
    # compiler.  We can spawn a bunch of parallel searches to reduce
    # the overhead of spelunking all these directories.
    # NOTE: we import spack.compilers here to avoid init order cycles
    import spack.compilers
    types = spack.compilers.all_compiler_types()
    compiler_lists = parmap(
        lambda cmp_cls: self.find_compiler(cmp_cls, *filtered_path),
        types)

    # FIX: flatten with a comprehension instead of
    # ``reduce(lambda x, y: x + y, compiler_lists)`` -- ``reduce`` is
    # not a builtin on Python 3 (it was never imported here) and raises
    # TypeError on an empty sequence when no compiler types exist.
    # This also warms the version caches in the parent process, which
    # speeds up Spack a lot.
    clist = [comp for cl in compiler_lists for comp in cl]
    return clist
def test_path_put_first(prepare_environment_for_tests):
    """path_put_first must reorder PATH-style variables exactly as asked."""
    envutil.path_set('TEST_ENV_VAR', test_paths)
    # Promote two entries to the front; the rest keep relative order.
    expected = ['/usr/bin', '/new_nonsense_path/a/b']
    expected += [p for p in test_paths if p != '/usr/bin']
    envutil.path_put_first('TEST_ENV_VAR', expected)
    assert envutil.get_path('TEST_ENV_VAR') == expected
def test_get_path(prepare_environment_for_tests):
    """get_path must split a colon-separated variable into a list."""
    os.environ['TEST_ENV_VAR'] = '/a:/b:/c/d'
    assert envutil.get_path('TEST_ENV_VAR') == ['/a', '/b', '/c/d']
def clean_environment():
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere.  We apply it
    # immediately unlike the other functions so it doesn't overwrite
    # what the modules load.
    env = EnvironmentModifications()

    # Remove these vars from the environment during build because they
    # can affect how some packages find libraries.  We want to make
    # sure that builds never pull in unintended external dependencies.
    env.unset('LD_LIBRARY_PATH')
    env.unset('LD_RUN_PATH')
    env.unset('DYLD_LIBRARY_PATH')
    env.unset('DYLD_FALLBACK_LIBRARY_PATH')

    # These vars affect how the compiler finds libraries and include dirs.
    env.unset('LIBRARY_PATH')
    env.unset('CPATH')
    env.unset('C_INCLUDE_PATH')
    env.unset('CPLUS_INCLUDE_PATH')
    env.unset('OBJC_INCLUDE_PATH')

    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
    # interference with Spack dependencies.
    # CNL requires these variables to be set (or at least some of them,
    # depending on the CNL version).
    hostarch = arch.Arch(arch.platform(), 'default_os', 'default_target')
    on_cray = str(hostarch.platform) == 'cray'
    using_cnl = re.match(r'cnl\d+', str(hostarch.os))
    if on_cray and not using_cnl:
        env.unset('CRAY_LD_LIBRARY_PATH')
        # Also drop any PKGCONF* variables Cray modules may have set.
        # NOTE(review): reconstructed nesting — the PKGCONF loop is
        # assumed to belong inside this Cray-specific branch; confirm
        # against the upstream file.
        for varname in os.environ.keys():
            if 'PKGCONF' in varname:
                env.unset(varname)

    # Unset the following variables because they can affect installation
    # of Autotools and CMake packages.
    build_system_vars = [
        'CC', 'CFLAGS', 'CPP', 'CPPFLAGS',  # C variables
        'CXX', 'CCC', 'CXXFLAGS', 'CXXCPP',  # C++ variables
        'F77', 'FFLAGS', 'FLIBS',  # Fortran77 variables
        'FC', 'FCFLAGS', 'FCLIBS',  # Fortran variables
        'LDFLAGS', 'LIBS'  # linker variables
    ]
    for v in build_system_vars:
        env.unset(v)

    # Unset mpi environment vars.  These flags should only be set by
    # mpi providers for packages with mpi dependencies
    mpi_vars = ['MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90']
    for v in mpi_vars:
        env.unset(v)

    build_lang = spack.config.get('config:build_language')
    if build_lang:
        # Override language-related variables. This can be used to force
        # English compiler messages etc., which allows parse_log_events
        # to show useful matches.
        env.set('LC_ALL', build_lang)

    # Remove any macports installs from the PATH. The macports ld can
    # cause conflicts with the built-in linker on el capitan. Solves
    # assembler issues, e.g.:
    #     suffix or operands invalid for `movq'
    path = get_path('PATH')
    for p in path:
        if '/macports/' in p:
            env.remove_path('PATH', p)

    env.apply_modifications()
def set_build_environment_variables(pkg, env, dirty):
    """Ensure a clean install environment when we build packages.

    This involves unsetting pesky environment variables that may affect
    the build.  It also involves setting environment variables used by
    Spack's compiler wrappers.

    Args:
        pkg: The package we are building
        env: The build environment
        dirty (bool): Skip unsetting the user's environment settings
    """
    # Gather information about various types of dependencies
    build_deps = set(pkg.spec.dependencies(deptype=('build', 'test')))
    link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
    build_link_deps = build_deps | link_deps
    rpath_deps = get_rpath_deps(pkg)

    build_prefixes = [dep.prefix for dep in build_deps]
    link_prefixes = [dep.prefix for dep in link_deps]
    build_link_prefixes = [dep.prefix for dep in build_link_deps]
    rpath_prefixes = [dep.prefix for dep in rpath_deps]

    # add run-time dependencies of direct build-time dependencies:
    for build_dep in build_deps:
        for run_dep in build_dep.traverse(deptype='run'):
            build_prefixes.append(run_dep.prefix)

    # Filter out system paths: ['/', '/usr', '/usr/local']
    # These paths can be introduced into the build when an external
    # package is added as a dependency.  The problem with these paths is
    # that they often contain hundreds of other packages installed in
    # the same directory.  If these paths come first, they can
    # overshadow Spack installations.
    build_prefixes = filter_system_paths(build_prefixes)
    link_prefixes = filter_system_paths(link_prefixes)
    build_link_prefixes = filter_system_paths(build_link_prefixes)
    rpath_prefixes = filter_system_paths(rpath_prefixes)

    # Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
    env.set_path(SPACK_DEPENDENCIES, build_link_prefixes)

    # These variables control compiler wrapper behavior
    env.set_path(SPACK_RPATH_DEPS, rpath_prefixes)
    env.set_path(SPACK_LINK_DEPS, link_prefixes)

    # Add dependencies to CMAKE_PREFIX_PATH
    env.set_path('CMAKE_PREFIX_PATH', build_link_prefixes)

    # Install prefix
    env.set(SPACK_PREFIX, pkg.prefix)

    # Install root prefix
    env.set(SPACK_INSTALL, spack.store.root)

    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere.
    if not dirty:
        # Remove these vars from the environment during build because
        # they can affect how some packages find libraries.  We want to
        # make sure that builds never pull in unintended external
        # dependencies.
        env.unset('LD_LIBRARY_PATH')
        env.unset('LIBRARY_PATH')
        env.unset('CPATH')
        env.unset('LD_RUN_PATH')
        env.unset('DYLD_LIBRARY_PATH')

        # Remove any macports installs from the PATH. The macports ld
        # can cause conflicts with the built-in linker on el capitan.
        # Solves assembler issues, e.g.:
        #     suffix or operands invalid for `movq'
        path = get_path('PATH')
        for p in path:
            if '/macports/' in p:
                env.remove_path('PATH', p)

    # Set environment variables if specified for
    # the given compiler
    compiler = pkg.compiler
    environment = compiler.environment

    if 'set' in environment:
        env_to_set = environment['set']
        for key, value in iteritems(env_to_set):
            env.set('SPACK_ENV_SET_%s' % key, value)
            env.set('%s' % key, value)

        # Let shell know which variables to set
        env_variables = ":".join(env_to_set.keys())
        env.set('SPACK_ENV_TO_SET', env_variables)

    if compiler.extra_rpaths:
        extra_rpaths = ':'.join(compiler.extra_rpaths)
        env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)

    # Add bin directories from dependencies to the PATH for the build.
    for prefix in build_prefixes:
        for dirname in ['bin', 'bin64']:
            bin_dir = os.path.join(prefix, dirname)
            if os.path.isdir(bin_dir):
                env.prepend_path('PATH', bin_dir)

    # Add spack build environment path with compiler wrappers first in
    # the path.  We add the compiler wrapper path, which includes
    # default wrappers (cc, c++, f77, f90), AND a subdirectory
    # containing compiler-specific symlinks.  The latter ensures that
    # builds that are sensitive to the *name* of the compiler see the
    # right name when we're building with the wrappers.
    #
    # Conflicts on case-insensitive systems (like "CC" and "cc") are
    # handled by putting one in the <build_env_path>/case-insensitive
    # directory.  Add that to the path too.
    env_paths = []
    compiler_specific = os.path.join(
        spack.paths.build_env_path, pkg.compiler.name)
    for item in [spack.paths.build_env_path, compiler_specific]:
        env_paths.append(item)
        ci = os.path.join(item, 'case-insensitive')
        if os.path.isdir(ci):
            env_paths.append(ci)

    for item in reversed(env_paths):
        env.prepend_path('PATH', item)
    env.set_path(SPACK_ENV_PATH, env_paths)

    # Working directory for the spack command itself, for debug logs.
    if spack.config.get('config:debug'):
        env.set(SPACK_DEBUG, 'TRUE')
        env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
        env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format('${PACKAGE}-${HASH:7}'))
        env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

    # Find ccache binary and hand it to build environment
    if spack.config.get('config:ccache'):
        # NOTE(review): Executable('ccache') presumably raises or is
        # falsy when the binary is missing — confirm that this guard can
        # actually trigger.
        ccache = Executable('ccache')
        if not ccache:
            raise RuntimeError("No ccache binary found in PATH")
        env.set(SPACK_CCACHE_BINARY, ccache)

    # Add any pkgconfig directories to PKG_CONFIG_PATH
    for prefix in build_link_prefixes:
        for directory in ('lib', 'lib64', 'share'):
            pcdir = os.path.join(prefix, directory, 'pkgconfig')
            if os.path.isdir(pcdir):
                env.prepend_path('PKG_CONFIG_PATH', pcdir)

    return env